Codemod asserts to assertxs in the runtime
hphp/runtime/vm/bytecode.h
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#ifndef incl_HPHP_VM_BYTECODE_H_
#define incl_HPHP_VM_BYTECODE_H_
#include "hphp/runtime/base/array-iterator.h"
#include "hphp/runtime/base/rds.h"
#include "hphp/runtime/base/rds-util.h"
#include "hphp/runtime/base/tv-arith.h"
#include "hphp/runtime/base/tv-conversions.h"
#include "hphp/runtime/base/tv-mutate.h"
#include "hphp/runtime/base/tv-variant.h"
#include "hphp/runtime/base/tv-refcount.h"
#include "hphp/runtime/vm/act-rec.h"
#include "hphp/runtime/vm/class.h"
#include "hphp/runtime/vm/func.h"
#include "hphp/runtime/vm/name-value-table.h"
#include "hphp/runtime/vm/unit.h"

#include "hphp/runtime/vm/jit/types.h"

#include "hphp/util/arena.h"
#include "hphp/util/type-traits.h"

#include <type_traits>
namespace HPHP {

///////////////////////////////////////////////////////////////////////////////

struct ActRec;
struct Func;
struct Resumable;

///////////////////////////////////////////////////////////////////////////////

#define EVAL_FILENAME_SUFFIX ") : eval()'d code"
// perform the set(op) operation on lhs & rhs, leaving the result in lhs.
// The old value of lhs is decrefed. Caller must call tvToCell() if lhs or
// rhs might be a ref.
ALWAYS_INLINE
void setopBody(Cell* lhs, SetOpOp op, Cell* rhs) {
  assertx(cellIsPlausible(*lhs));
  assertx(cellIsPlausible(*rhs));

  switch (op) {
  case SetOpOp::PlusEqual:   cellAddEq(*lhs, *rhs); return;
  case SetOpOp::MinusEqual:  cellSubEq(*lhs, *rhs); return;
  case SetOpOp::MulEqual:    cellMulEq(*lhs, *rhs); return;
  case SetOpOp::DivEqual:    cellDivEq(*lhs, *rhs); return;
  case SetOpOp::PowEqual:    cellPowEq(*lhs, *rhs); return;
  case SetOpOp::ModEqual:    cellModEq(*lhs, *rhs); return;
  case SetOpOp::ConcatEqual: cellConcatEq(*lhs, *rhs); return;
  case SetOpOp::AndEqual:    cellBitAndEq(*lhs, *rhs); return;
  case SetOpOp::OrEqual:     cellBitOrEq(*lhs, *rhs); return;
  case SetOpOp::XorEqual:    cellBitXorEq(*lhs, *rhs); return;
  case SetOpOp::SlEqual:     cellShlEq(*lhs, *rhs); return;
  case SetOpOp::SrEqual:     cellShrEq(*lhs, *rhs); return;
  case SetOpOp::PlusEqualO:  cellAddEqO(*lhs, *rhs); return;
  case SetOpOp::MinusEqualO: cellSubEqO(*lhs, *rhs); return;
  case SetOpOp::MulEqualO:   cellMulEqO(*lhs, *rhs); return;
  }
  not_reached();
}
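
// Illustrative usage sketch (not part of the original header): applying a
// compound assignment to two integer cells. Both operands are assumed to
// already be cells; call tvToCell() first if either might be a ref.
//
//   Cell lhs = make_tv<KindOfInt64>(40);
//   Cell rhs = make_tv<KindOfInt64>(2);
//   setopBody(&lhs, SetOpOp::PlusEqual, &rhs);  // lhs now holds int(42)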
///////////////////////////////////////////////////////////////////////////////

struct ExtraArgs {
  ExtraArgs(const ExtraArgs&) = delete;
  ExtraArgs& operator=(const ExtraArgs&) = delete;

  /*
   * Allocate an ExtraArgs structure, with arguments copied from the
   * evaluation stack. This takes ownership of the args without
   * adjusting reference counts, so they must be discarded from the
   * stack.
   */
  static ExtraArgs* allocateCopy(TypedValue* args, unsigned nargs);

  /*
   * Allocate an ExtraArgs, without initializing any of the arguments.
   * All arguments must be initialized via getExtraArg before
   * deallocate() is called for the returned pointer.
   */
  static ExtraArgs* allocateUninit(unsigned nargs);

  /*
   * Deallocate an ExtraArgs structure. Either use the one that
   * exists in an ActRec, or do it explicitly.
   */
  static void deallocate(ActRec*);
  static void deallocate(ExtraArgs*, unsigned numArgs);

  // Just free the memory; don't dec-ref anything.
  static void deallocateRaw(ExtraArgs*);

  /*
   * Make a copy of ExtraArgs.
   */
  ExtraArgs* clone(ActRec* fp) const;

  /*
   * Get the slot for extra arg i, where i = argNum - func->numParams.
   */
  TypedValue* getExtraArg(unsigned argInd) const;

private:
  ExtraArgs();
  ~ExtraArgs();

  static void* allocMem(unsigned nargs);

private:
  TypedValue m_extraArgs[0];
  TYPE_SCAN_FLEXIBLE_ARRAY_FIELD(m_extraArgs);
};
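
// Illustrative sketch (not part of the original header): walking the
// arguments a caller passed beyond the declared parameters. Assumes `fp` is
// a live ActRec that stashed its surplus args in an ExtraArgs, and that the
// usual ActRec accessors (func(), numArgs(), getExtraArgs()) are available.
//
//   auto const func   = fp->func();
//   auto const extras = fp->getExtraArgs();
//   auto const nExtra = fp->numArgs() - func->numParams();
//   for (unsigned i = 0; i < nExtra; ++i) {
//     TypedValue* tv = extras->getExtraArg(i);  // argument #(numParams + i)
//     ...
//   }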
/*
 * Variable environment.
 *
 * A variable environment consists of the locals for the current function
 * (either pseudo-main, global function, or method), plus any variables that
 * are dynamically defined.
 *
 * Logically, a global function or method starts off with a variable
 * environment that contains only its locals, but a pseudo-main is handed
 * its caller's existing variable environment. Generally, however, we don't
 * create a variable environment for global functions or methods until one
 * is actually needed (i.e. when it is about to include a pseudo-main, or
 * when it uses dynamic variable lookups).
 *
 * Named locals always appear in the expected place on the stack, even after
 * a VarEnv is attached. Internally, a NameValueTable is used to hook up
 * names to the local locations.
 */
struct VarEnv {
private:
  NameValueTable m_nvTable;
  ExtraArgs* m_extraArgs;
  uint16_t m_depth;
  const bool m_global;

public:
  explicit VarEnv();
  explicit VarEnv(ActRec* fp, ExtraArgs* eArgs);
  explicit VarEnv(const VarEnv* varEnv, ActRec* fp);
  ~VarEnv();

  // Free the VarEnv and locals for the given frame, which must have a VarEnv.
  static void deallocate(ActRec* fp);

  // Allocates a local VarEnv and attaches it to the existing FP.
  static VarEnv* createLocal(ActRec* fp);

  // Allocate a global VarEnv. Initially not attached to any frame.
  static void createGlobal();

  VarEnv* clone(ActRec* fp) const;

  void suspend(const ActRec* oldFP, ActRec* newFP);
  void enterFP(ActRec* oldFP, ActRec* newFP);
  void exitFP(ActRec* fp);

  void set(const StringData* name, const TypedValue* tv);
  void bind(const StringData* name, TypedValue* tv);
  void setWithRef(const StringData* name, TypedValue* tv);
  TypedValue* lookup(const StringData* name);
  TypedValue* lookupAdd(const StringData* name);
  bool unset(const StringData* name);

  Array getDefinedVariables() const;

  // Used to save/restore m_cfp for the debugger.
  ActRec* getFP() const { return m_nvTable.getFP(); }
  bool isGlobalScope() const { return m_global; }

  // Access to the wrapped ExtraArgs, if we have one.
  TypedValue* getExtraArg(unsigned argInd) const;
};
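
// Illustrative sketch (not part of the original header): attaching a local
// VarEnv to a live frame and defining a variable dynamically. `fp` and the
// value are assumed to be set up by the interpreter.
//
//   auto const env = VarEnv::createLocal(fp);   // frame now has a VarEnv
//   TypedValue tv = make_tv<KindOfInt64>(1);
//   env->set(makeStaticString("dyn"), &tv);     // defines $dyn dynamically
//   assertx(env->lookup(makeStaticString("dyn")) != nullptr);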
/*
 * Action taken to handle any extra arguments passed for a function call.
 */
enum class ExtraArgsAction {
  None,     // no extra arguments; zero out m_extraArgs
  Discard,  // discard extra arguments
  Variadic, // populate `...$args' parameter
  MayUseVV, // create ExtraArgs
  VarAndVV, // both of the above
};

inline ExtraArgsAction extra_args_action(const Func* func, uint32_t argc) {
  using Action = ExtraArgsAction;

  auto const nparams = func->numNonVariadicParams();
  if (argc <= nparams) return Action::None;

  if (LIKELY(func->discardExtraArgs())) {
    return Action::Discard;
  }
  if (func->attrs() & AttrMayUseVV) {
    return func->hasVariadicCaptureParam() ? Action::VarAndVV
                                           : Action::MayUseVV;
  }
  assertx(func->hasVariadicCaptureParam());
  return Action::Variadic;
}
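
// Summary of the decision above (restating the code, for reference):
//
//   argc <= numNonVariadicParams()            -> Action::None
//   func->discardExtraArgs()                  -> Action::Discard
//   AttrMayUseVV and a variadic capture param -> Action::VarAndVV
//   AttrMayUseVV only                         -> Action::MayUseVV
//   variadic capture param only               -> Action::Variadic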
///////////////////////////////////////////////////////////////////////////////

/*
 * Returns true iff ar represents a frame on the VM eval stack or a Resumable
 * object on the PHP heap.
 */
bool isVMFrame(const ActRec* ar);

/*
 * Returns true iff the given address is one of the special debugger return
 * helpers.
 */
bool isDebuggerReturnHelper(void* addr);

/*
 * If ar->m_savedRip points somewhere in the TC that is not a return helper,
 * change it to point to an appropriate return helper. The two different
 * versions are for the different needs of the C++ unwinder and debugger
 * hooks, respectively.
 */
void unwindPreventReturnToTC(ActRec* ar);
void debuggerPreventReturnToTC(ActRec* ar);

/*
 * Call debuggerPreventReturnToTC() on all live VM frames in this thread.
 */
void debuggerPreventReturnsToTC();

///////////////////////////////////////////////////////////////////////////////
inline int32_t arOffset(const ActRec* ar, const ActRec* other) {
  return (intptr_t(other) - intptr_t(ar)) / sizeof(TypedValue);
}

inline ActRec* arAtOffset(const ActRec* ar, int32_t offset) {
  return (ActRec*)(intptr_t(ar) + intptr_t(offset * sizeof(TypedValue)));
}

inline ActRec* arFromSpOffset(const ActRec* sp, int32_t offset) {
  return arAtOffset(sp, offset);
}
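
// Illustrative sketch (not part of the original header): arOffset() and
// arAtOffset() are inverses for cell-aligned ActRecs, measuring distances
// in units of TypedValue-sized stack cells.
//
//   auto const off = arOffset(fp, prevFP);   // cells from fp to prevFP
//   assertx(arAtOffset(fp, off) == prevFP);  // round-trips back to prevFP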

void frame_free_locals_no_hook(ActRec* fp);
#define arReturn(a, x)                          \
  ([&] {                                        \
    ActRec* ar_ = (a);                          \
    TypedValue val_;                            \
    new (&val_) Variant(x);                     \
    frame_free_locals_no_hook(ar_);             \
    tvCopy(val_, *ar_->retSlot());              \
    return ar_->retSlot();                      \
  }())

#define tvReturn(x)                                                     \
  ([&] {                                                                \
    TypedValue val_;                                                    \
    new (&val_) Variant(x);                                             \
    assertx(val_.m_type != KindOfRef && val_.m_type != KindOfUninit);   \
    return val_;                                                        \
  }())
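
// Illustrative sketch (not part of the original header): a hypothetical HNI
// builtin returning a PHP value via tvReturn(). The Variant constructed
// inside the macro handles refcounting; the returned TypedValue owns the
// value.
//
//   TypedValue HHVM_FUNCTION(example_add, int64_t a, int64_t b) {
//     return tvReturn(a + b);  // example_add is illustrative only
//   }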
template <bool crossBuiltin> Class* arGetContextClassImpl(const ActRec* ar);
template <> Class* arGetContextClassImpl<true>(const ActRec* ar);
template <> Class* arGetContextClassImpl<false>(const ActRec* ar);
inline Class* arGetContextClass(const ActRec* ar) {
  return arGetContextClassImpl<false>(ar);
}
inline Class* arGetContextClassFromBuiltin(const ActRec* ar) {
  return arGetContextClassImpl<true>(ar);
}

///////////////////////////////////////////////////////////////////////////////
// Used by extension functions that take a PHP "callback", since they need to
// figure out the callback context once and call it multiple times (e.g.
// array_map, array_filter, ...).
struct CallCtx {
  const Func* func;
  ObjectData* this_;
  Class* cls;
  StringData* invName;
  bool dynamic;
};
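
// Illustrative sketch (not part of the original header): an extension helper
// typically resolves a PHP callback once into a CallCtx (e.g. via the
// vm_decode_function() helper declared in builtin-functions.h; the
// CallCtx-taking overload is assumed here) and then reuses it per element.
//
//   CallCtx ctx;
//   vm_decode_function(callback, ctx);   // fills func/this_/cls/invName
//   for (ArrayIter iter(input); iter; ++iter) {
//     ...                                // invoke ctx.func on iter.second()
//   }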
constexpr size_t kNumIterCells = sizeof(Iter) / sizeof(Cell);
constexpr size_t kNumActRecCells = sizeof(ActRec) / sizeof(Cell);

constexpr size_t clsRefCountToCells(size_t n) {
  return (n * sizeof(LowPtr<Class>*) + sizeof(Cell) - 1) / sizeof(Cell);
}
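
// Worked example (illustrative, assuming 8-byte pointers and 16-byte Cells):
// clsRefCountToCells(3) == (3*8 + 15) / 16 == 2, i.e. the slot count is a
// ceiling division of the class-ref bytes by the cell size.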

///////////////////////////////////////////////////////////////////////////////
/*
 * We pad all stack overflow checks by a small amount to allow for three
 * things:
 *
 *   - inlining functions without having to either do another stack
 *     check (or chase down prologues to smash checks to be bigger).
 *
 *   - omitting stack overflow checks on leaf functions
 *
 *   - delaying stack overflow checks on reentry
 */
constexpr int kStackCheckLeafPadding = 20;
constexpr int kStackCheckReenterPadding = 9;
constexpr int kStackCheckPadding = kStackCheckLeafPadding +
  kStackCheckReenterPadding;

constexpr int kInvalidRaiseLevel = -1;
constexpr int kInvalidNesting = -1;
struct Fault {
  explicit Fault()
    : m_raiseNesting(kInvalidNesting),
      m_raiseFrame(nullptr),
      m_raiseOffset(kInvalidOffset),
      m_handledCount(0) {}

  ObjectData* m_userException;

  // The VM nesting at the moment where the exception was thrown.
  int m_raiseNesting;
  // The frame where the exception was thrown.
  ActRec* m_raiseFrame;
  // The offset within the frame where the exception was thrown.
  // This value is updated when exception chaining takes place; in that
  // case the raise offset of the newly thrown exception is set to the
  // offset of the previously thrown exception. The offset is also
  // updated when the exception propagates outside its current frame.
  Offset m_raiseOffset;
  // The number of EHs that were already examined for this exception.
  // This is used to ensure that the same exception handler is not
  // run twice for the same exception. The unwinder may be entered
  // multiple times for the same fault as a result of calling Unwind.
  // The field is used to skip through the EHs that were already run.
  int m_handledCount;
};
// Interpreter evaluation stack.
struct Stack {
private:
  TypedValue* m_elms;
  TypedValue* m_top;
  TypedValue* m_base; // Stack grows down, so m_base is beyond the end of
                      // m_elms.

public:
  bool isAllocated() { return m_elms != nullptr; }
  void* getStackLowAddress() const { return m_elms; }
  void* getStackHighAddress() const { return m_base; }
  bool isValidAddress(uintptr_t v) {
    return v >= uintptr_t(m_elms) && v < uintptr_t(m_base);
  }
  void requestInit();
  void requestExit();

  static const int sSurprisePageSize;
  static const unsigned sMinStackElms;
  static void ValidateStackSize();
  Stack();
  ~Stack();

  std::string toString(const ActRec* fp, int offset,
                       std::string prefix="") const;

  bool wouldOverflow(int numCells) const;
  /*
   * top --
   * topOfStackOffset --
   *
   *   Accessors for the x64 translator. Do not play on or around.
   */
  TypedValue*& top() {
    return m_top;
  }

  static constexpr size_t topOfStackOffset() {
    return offsetof(Stack, m_top);
  }

  static TypedValue* anyFrameStackBase(const ActRec* fp);
  static TypedValue* frameStackBase(const ActRec* fp);
  static TypedValue* resumableStackBase(const ActRec* fp);

  ALWAYS_INLINE
  size_t count() const {
    return ((uintptr_t)m_base - (uintptr_t)m_top) / sizeof(TypedValue);
  }
  // Same as discard(), but meant to replace popC() iff the interpreter knows
  // for certain that decrementing a refcount is unnecessary.
  ALWAYS_INLINE
  void popX() {
    assertx(m_top != m_base);
    assertx(!isRefcountedType(m_top->m_type));
    tvDebugTrash(m_top);
    m_top++;
  }

  ALWAYS_INLINE
  void popC() {
    assertx(m_top != m_base);
    assertx(cellIsPlausible(*m_top));
    tvDecRefGen(m_top);
    tvDebugTrash(m_top);
    m_top++;
  }

  ALWAYS_INLINE
  void popV() {
    assertx(m_top != m_base);
    assertx(refIsPlausible(*m_top));
    tvDecRefRef(m_top);
    tvDebugTrash(m_top);
    m_top++;
  }

  ALWAYS_INLINE
  void popU() {
    assertx(m_top != m_base);
    assertx(m_top->m_type == KindOfUninit);
    tvDebugTrash(m_top);
    ++m_top;
  }

  ALWAYS_INLINE
  void popTV() {
    assertx(m_top != m_base);
    assertx(tvIsPlausible(*m_top));
    tvDecRefGen(m_top);
    tvDebugTrash(m_top);
    m_top++;
  }
  // popAR() should only be used to tear down a pre-live ActRec. Once
  // an ActRec is live, it should be torn down using frame_free_locals()
  // followed by discardAR() or ret().
  ALWAYS_INLINE
  void popAR() {
    assertx(m_top != m_base);
    ActRec* ar = (ActRec*)m_top;
    if (ar->func()->cls() && ar->hasThis()) decRefObj(ar->getThis());
    if (ar->magicDispatch()) {
      decRefStr(ar->getInvName());
    }
    discardAR();
  }

  ALWAYS_INLINE
  void discardAR() {
    assertx(m_top != m_base);
    if (debug) {
      for (int i = 0; i < kNumActRecCells; ++i) {
        tvDebugTrash(m_top + i);
      }
    }
    m_top += kNumActRecCells;
    assertx((uintptr_t)m_top <= (uintptr_t)m_base);
  }

  ALWAYS_INLINE
  void ret() {
    // Leave part of the activation on the stack, since the return value now
    // resides there.
    if (debug) {
      for (int i = 0; i < kNumActRecCells - 1; ++i) {
        tvDebugTrash(m_top + i);
      }
    }
    m_top += kNumActRecCells - 1;
    assertx((uintptr_t)m_top <= (uintptr_t)m_base);
  }
  ALWAYS_INLINE
  void discard() {
    assertx(m_top != m_base);
    tvDebugTrash(m_top);
    m_top++;
  }

  ALWAYS_INLINE
  void ndiscard(size_t n) {
    assertx((uintptr_t)&m_top[n] <= (uintptr_t)m_base);
    if (debug) {
      for (int i = 0; i < n; ++i) {
        tvDebugTrash(m_top + i);
      }
    }
    m_top += n;
  }

  ALWAYS_INLINE
  void trim(Cell* c) {
    assertx(c <= m_base);
    assertx(m_top <= c);
    if (debug) {
      while (m_top < c) tvDebugTrash(m_top++);
    } else {
      m_top = c;
    }
  }
  ALWAYS_INLINE
  void dup() {
    assertx(m_top != m_base);
    assertx(m_top != m_elms);
    assertx(m_top->m_type != KindOfRef);
    Cell* fr = m_top;
    m_top--;
    Cell* to = m_top;
    cellDup(*fr, *to);
  }

  ALWAYS_INLINE
  void box() {
    assertx(m_top != m_base);
    assertx(m_top->m_type != KindOfRef);
    tvBox(*m_top);
  }

  ALWAYS_INLINE
  void unbox() {
    assertx(m_top != m_base);
    tvUnbox(*m_top);
  }
  ALWAYS_INLINE
  void pushUninit() {
    assertx(m_top != m_elms);
    m_top--;
    tvWriteUninit(*m_top);
  }

  ALWAYS_INLINE
  void pushNull() {
    assertx(m_top != m_elms);
    m_top--;
    tvWriteNull(*m_top);
  }

  ALWAYS_INLINE
  void pushNullUninit() {
    assertx(m_top != m_elms);
    m_top--;
    m_top->m_data.num = 0;
    m_top->m_type = KindOfUninit;
  }

  template<DataType t, class T> void pushVal(T v) {
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<t>(v);
  }
  ALWAYS_INLINE void pushBool(bool v) { pushVal<KindOfBoolean>(v); }
  ALWAYS_INLINE void pushInt(int64_t v) { pushVal<KindOfInt64>(v); }
  ALWAYS_INLINE void pushDouble(double v) { pushVal<KindOfDouble>(v); }
  // This should only be called directly when the caller has
  // already adjusted the refcount appropriately.
  ALWAYS_INLINE
  void pushStringNoRc(StringData* s) {
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfString>(s);
  }

  ALWAYS_INLINE
  void pushStaticString(const StringData* s) {
    assertx(s->isStatic()); // No need to call s->incRefCount().
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfPersistentString>(s);
  }

  // These should only be called directly when the caller has
  // already adjusted the refcount appropriately.
  ALWAYS_INLINE
  void pushArrayNoRc(ArrayData* a) {
    assertx(a->isPHPArray());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfArray>(a);
  }

  ALWAYS_INLINE
  void pushVecNoRc(ArrayData* a) {
    assertx(a->isVecArray());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfVec>(a);
  }

  ALWAYS_INLINE
  void pushDictNoRc(ArrayData* a) {
    assertx(a->isDict());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfDict>(a);
  }

  ALWAYS_INLINE
  void pushKeysetNoRc(ArrayData* a) {
    assertx(a->isKeyset());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfKeyset>(a);
  }
  ALWAYS_INLINE
  void pushArray(ArrayData* a) {
    assertx(a);
    pushArrayNoRc(a);
    a->incRefCount();
  }

  ALWAYS_INLINE
  void pushVec(ArrayData* a) {
    assertx(a);
    pushVecNoRc(a);
    a->incRefCount();
  }

  ALWAYS_INLINE
  void pushDict(ArrayData* a) {
    assertx(a);
    pushDictNoRc(a);
    a->incRefCount();
  }

  ALWAYS_INLINE
  void pushKeyset(ArrayData* a) {
    assertx(a);
    pushKeysetNoRc(a);
    a->incRefCount();
  }
  ALWAYS_INLINE
  void pushStaticArray(const ArrayData* a) {
    assertx(a->isStatic()); // No need to call a->incRefCount().
    assertx(a->isPHPArray());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfPersistentArray>(a);
  }

  ALWAYS_INLINE
  void pushStaticVec(const ArrayData* a) {
    assertx(a->isStatic()); // No need to call a->incRefCount().
    assertx(a->isVecArray());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfPersistentVec>(a);
  }

  ALWAYS_INLINE
  void pushStaticDict(const ArrayData* a) {
    assertx(a->isStatic()); // No need to call a->incRefCount().
    assertx(a->isDict());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfPersistentDict>(a);
  }

  ALWAYS_INLINE
  void pushStaticKeyset(const ArrayData* a) {
    assertx(a->isStatic()); // No need to call a->incRefCount().
    assertx(a->isKeyset());
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfPersistentKeyset>(a);
  }
  // This should only be called directly when the caller has
  // already adjusted the refcount appropriately.
  ALWAYS_INLINE
  void pushObjectNoRc(ObjectData* o) {
    assertx(m_top != m_elms);
    m_top--;
    *m_top = make_tv<KindOfObject>(o);
  }

  ALWAYS_INLINE
  void pushObject(ObjectData* o) {
    pushObjectNoRc(o);
    o->incRefCount();
  }

  ALWAYS_INLINE
  void nalloc(size_t n) {
    assertx((uintptr_t)(m_top - n) <= (uintptr_t)m_base);
    m_top -= n;
  }
  ALWAYS_INLINE
  Cell* allocC() {
    assertx(m_top != m_elms);
    m_top--;
    return (Cell*)m_top;
  }

  ALWAYS_INLINE
  Ref* allocV() {
    assertx(m_top != m_elms);
    m_top--;
    return (Ref*)m_top;
  }

  ALWAYS_INLINE
  TypedValue* allocTV() {
    assertx(m_top != m_elms);
    m_top--;
    return m_top;
  }

  ALWAYS_INLINE
  ActRec* allocA() {
    assertx((uintptr_t)(m_top - kNumActRecCells) >= (uintptr_t)m_elms);
    assertx(kNumActRecCells * sizeof(Cell) == sizeof(ActRec));
    m_top -= kNumActRecCells;
    return (ActRec*)m_top;
  }

  ALWAYS_INLINE
  void allocI() {
    assertx(kNumIterCells * sizeof(Cell) == sizeof(Iter));
    assertx((uintptr_t)(m_top - kNumIterCells) >= (uintptr_t)m_elms);
    m_top -= kNumIterCells;
  }

  ALWAYS_INLINE
  void allocClsRefSlots(size_t n) {
    assertx((uintptr_t)(m_top - clsRefCountToCells(n)) >= (uintptr_t)m_elms);
    m_top -= clsRefCountToCells(n);
    if (debug) {
      memset(m_top, kTrashClsRef, clsRefCountToCells(n) * sizeof(Cell));
    }
  }
  ALWAYS_INLINE
  void replaceC(const Cell c) {
    assertx(m_top != m_base);
    assertx(m_top->m_type != KindOfRef);
    tvDecRefGen(m_top);
    *m_top = c;
  }

  template <DataType DT>
  ALWAYS_INLINE
  void replaceC() {
    assertx(m_top != m_base);
    assertx(m_top->m_type != KindOfRef);
    tvDecRefGen(m_top);
    *m_top = make_tv<DT>();
  }

  template <DataType DT, typename T>
  ALWAYS_INLINE
  void replaceC(T value) {
    assertx(m_top != m_base);
    assertx(m_top->m_type != KindOfRef);
    tvDecRefGen(m_top);
    *m_top = make_tv<DT>(value);
  }

  ALWAYS_INLINE
  void replaceTV(const TypedValue& tv) {
    assertx(m_top != m_base);
    tvDecRefGen(m_top);
    *m_top = tv;
  }

  template <DataType DT>
  ALWAYS_INLINE
  void replaceTV() {
    assertx(m_top != m_base);
    tvDecRefGen(m_top);
    *m_top = make_tv<DT>();
  }

  template <DataType DT, typename T>
  ALWAYS_INLINE
  void replaceTV(T value) {
    assertx(m_top != m_base);
    tvDecRefGen(m_top);
    *m_top = make_tv<DT>(value);
  }
  ALWAYS_INLINE
  Cell* topC() {
    assertx(m_top != m_base);
    return tvAssertCell(m_top);
  }

  ALWAYS_INLINE
  Ref* topV() {
    assertx(m_top != m_base);
    assertx(m_top->m_type == KindOfRef);
    return (Ref*)m_top;
  }

  ALWAYS_INLINE
  TypedValue* topTV() {
    assertx(m_top != m_base);
    return m_top;
  }

  ALWAYS_INLINE
  Cell* indC(size_t ind) {
    assertx(m_top != m_base);
    assertx(m_top[ind].m_type != KindOfRef);
    return tvAssertCell(&m_top[ind]);
  }

  ALWAYS_INLINE
  TypedValue* indTV(size_t ind) {
    assertx(m_top != m_base);
    return &m_top[ind];
  }
};
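
// Illustrative sketch (not part of the original header): typical push/pop
// pairing on the eval stack. The push*() helpers move m_top down one cell;
// popC() dec-refs (a no-op for null/int) and moves it back up.
//
//   void pushAndDrop(Stack& stack) {
//     stack.pushInt(42);   // one cell deeper
//     stack.pushNull();    // another cell
//     stack.popC();        // drops the null
//     stack.popC();        // back to the original depth
//   }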

//////////////////////////////////////////////////////////////////////
/*
 * Visit all the slots and pre-live ActRecs on a live eval stack,
 * handling FPI regions and resumables correctly, and stopping when we
 * reach the supplied activation record.
 *
 * The stack elements are visited from lower address to higher, with
 * ActRecs visited after the stack slots below them.
 *
 * This will not read the VM registers (pc, fp, sp), so it will
 * perform the requested visitation independent of modifications to
 * the VM stack or frame pointer.
 */
template<class TV, class ARFun, class TVFun>
typename maybe_const<TV, TypedValue>::type
visitStackElems(const ActRec* const fp,
                TV* const stackTop,
                Offset const bcOffset,
                ARFun arFun,
                TVFun tvFun) {
  const TypedValue* const base = Stack::anyFrameStackBase(fp);
  auto cursor = stackTop;
  assertx(cursor <= base);

  if (auto fe = fp->m_func->findFPI(bcOffset)) {
    for (;;) {
      ActRec* ar;
      if (!fp->resumed()) {
        ar = arAtOffset(fp, -fe->m_fpOff);
      } else {
        // fp is pointing into the Resumable struct. Since fpOff is
        // given as an offset from the frame pointer as if it were in
        // the normal place on the main stack, we have to reconstruct
        // that "normal place".
        auto const fakePrevFP = reinterpret_cast<const ActRec*>(
          base + fp->m_func->numSlotsInFrame()
        );
        ar = arAtOffset(fakePrevFP, -fe->m_fpOff);
      }

      assertx(cursor <= reinterpret_cast<TypedValue*>(ar));
      while (cursor < reinterpret_cast<TypedValue*>(ar)) {
        tvFun(cursor++);
      }
      arFun(ar, fe->m_fpushOff);

      cursor += kNumActRecCells;
      if (fe->m_parentIndex == -1) break;
      fe = &fp->m_func->fpitab()[fe->m_parentIndex];
    }
  }

  while (cursor < base) {
    tvFun(cursor++);
  }
}
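
// Illustrative sketch (not part of the original header): counting the cells
// and pre-live ActRecs between the current stack top and the frame base,
// assuming the usual vmStack()/pcOff() accessors from vm-regs.h.
//
//   size_t cells = 0, actrecs = 0;
//   visitStackElems(
//     fp, vmStack().topTV(), pcOff(),
//     [&] (const ActRec* ar, Offset) { ++actrecs; },  // pre-live ActRecs
//     [&] (const TypedValue* tv) { ++cells; }         // ordinary stack slots
//   );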
void resetCoverageCounters();

// The interpOne*() methods implement individual opcode handlers.
using InterpOneFunc = jit::TCA (*) (ActRec*, TypedValue*, Offset);
extern InterpOneFunc interpOneEntryPoints[];

bool doFCallArrayTC(PC pc, int32_t numArgs, void*);
bool doFCall(ActRec* ar, PC& pc);
jit::TCA dispatchBB();
void pushFrameSlots(const Func* func, int nparams = 0);
Array getDefinedVariables(const ActRec*);
jit::TCA suspendStack(PC& pc);

enum class StackArgsState { // tells prepareFuncEntry how much work to do
  // the stack may contain more arguments than the function expects
  Untrimmed,
  // the stack has already been trimmed of any extra arguments, which
  // have been teleported away into ExtraArgs and/or a variadic param
  Trimmed
};

void enterVMAtFunc(ActRec* enterFnAr, StackArgsState stk, VarEnv* varEnv);
void enterVMAtCurPC();
bool prepareArrayArgs(ActRec* ar, const Cell args, Stack& stack,
                      int nregular, bool doCufRefParamChecks,
                      TypedValue* retval, bool checkRefAnnot);

///////////////////////////////////////////////////////////////////////////////

}

#endif