// Commit: Fix refcounting in arReturn() and stop leaking static strings.
// Path:   hphp/runtime/vm/bytecode.h
// Blob:   6ffc7f8c3fcc5622c9426eed772bcf7c4753bdf9
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
17 #ifndef incl_HPHP_VM_BYTECODE_H_
18 #define incl_HPHP_VM_BYTECODE_H_
20 #include "hphp/runtime/base/array-iterator.h"
21 #include "hphp/runtime/base/class-info.h"
22 #include "hphp/runtime/base/rds.h"
23 #include "hphp/runtime/base/rds-util.h"
24 #include "hphp/runtime/base/tv-arith.h"
25 #include "hphp/runtime/base/tv-conversions.h"
26 #include "hphp/runtime/base/tv-helpers.h"
28 #include "hphp/runtime/vm/class.h"
29 #include "hphp/runtime/vm/func.h"
30 #include "hphp/runtime/vm/name-value-table.h"
31 #include "hphp/runtime/vm/unit.h"
33 #include "hphp/util/arena.h"
35 #include <type_traits>
37 namespace HPHP {
39 /**
40 * These macros allow us to easily change the arguments to iop*() opcode
41 * implementations.
43 #define IOP_ARGS PC& pc
44 #define IOP_PASS_ARGS pc
45 #define IOP_PASS(pc) pc
47 #define EVAL_FILENAME_SUFFIX ") : eval()'d code"
49 ALWAYS_INLINE
50 void SETOP_BODY_CELL(Cell* lhs, SetOpOp op, Cell* rhs) {
51 assert(cellIsPlausible(*lhs));
52 assert(cellIsPlausible(*rhs));
54 switch (op) {
55 case SetOpOp::PlusEqual: cellAddEq(*lhs, *rhs); return;
56 case SetOpOp::MinusEqual: cellSubEq(*lhs, *rhs); return;
57 case SetOpOp::MulEqual: cellMulEq(*lhs, *rhs); return;
58 case SetOpOp::DivEqual: cellDivEq(*lhs, *rhs); return;
59 case SetOpOp::PowEqual: cellPowEq(*lhs, *rhs); return;
60 case SetOpOp::ModEqual: cellModEq(*lhs, *rhs); return;
61 case SetOpOp::ConcatEqual:
62 concat_assign(tvAsVariant(lhs), cellAsCVarRef(*rhs).toString());
63 return;
64 case SetOpOp::AndEqual: cellBitAndEq(*lhs, *rhs); return;
65 case SetOpOp::OrEqual: cellBitOrEq(*lhs, *rhs); return;
66 case SetOpOp::XorEqual: cellBitXorEq(*lhs, *rhs); return;
67 case SetOpOp::SlEqual: cellShlEq(*lhs, *rhs); return;
68 case SetOpOp::SrEqual: cellShrEq(*lhs, *rhs); return;
69 case SetOpOp::PlusEqualO: cellAddEqO(*lhs, *rhs); return;
70 case SetOpOp::MinusEqualO: cellSubEqO(*lhs, *rhs); return;
71 case SetOpOp::MulEqualO: cellMulEqO(*lhs, *rhs); return;
73 not_reached();
76 ALWAYS_INLINE
77 void SETOP_BODY(TypedValue* lhs, SetOpOp op, Cell* rhs) {
78 SETOP_BODY_CELL(tvToCell(lhs), op, rhs);
81 class Func;
82 struct ActRec;
84 struct ExtraArgs : private boost::noncopyable {
86 * Allocate an ExtraArgs structure, with arguments copied from the
87 * evaluation stack. This takes ownership of the args without
88 * adjusting reference counts, so they must be discarded from the
89 * stack.
91 static ExtraArgs* allocateCopy(TypedValue* args, unsigned nargs);
94 * Allocate an ExtraArgs, without initializing any of the arguments.
95 * All arguments must be initialized via getExtraArg before
96 * deallocate() is called for the returned pointer.
98 static ExtraArgs* allocateUninit(unsigned nargs);
101 * Deallocate an extraArgs structure. Either use the one that
102 * exists in a ActRec, or do it explicitly.
104 static void deallocate(ActRec*);
105 static void deallocate(ExtraArgs*, unsigned numArgs);
108 * Make a copy of ExtraArgs.
110 ExtraArgs* clone(ActRec* fp) const;
113 * Get the slot for extra arg i, where i = argNum - func->numParams.
115 TypedValue* getExtraArg(unsigned argInd) const;
117 private:
118 ExtraArgs();
119 ~ExtraArgs();
121 static void* allocMem(unsigned nargs);
123 private:
124 TypedValue m_extraArgs[];
128 * Variable environment.
130 * A variable environment consists of the locals for the current function
131 * (either pseudo-main, global function, or method), plus any variables that
132 * are dynamically defined.
134 * Logically, a global function or method starts off with a variable
135 * environment that contains only its locals, but a pseudo-main is handed
136 * its caller's existing variable environment. Generally, however, we don't
137 * create a variable environment for global functions or methods until it
138 * actually needs one (i.e. if it is about to include a pseudo-main, or if
139 * it uses dynamic variable lookups).
141 * Named locals always appear in the expected place on the stack, even after
142 * a VarEnv is attached. Internally uses a NameValueTable to hook up names to
143 * the local locations.
145 class VarEnv {
146 private:
147 NameValueTable m_nvTable;
148 ExtraArgs* m_extraArgs;
149 uint16_t m_depth;
150 bool m_global;
152 public:
153 explicit VarEnv();
154 explicit VarEnv(ActRec* fp, ExtraArgs* eArgs);
155 explicit VarEnv(const VarEnv* varEnv, ActRec* fp);
156 ~VarEnv();
158 // Allocates a local VarEnv and attaches it to the existing FP.
159 static VarEnv* createLocal(ActRec* fp);
161 // Allocate a global VarEnv. Initially not attached to any frame.
162 static VarEnv* createGlobal();
164 VarEnv* clone(ActRec* fp) const;
166 void suspend(const ActRec* oldFP, ActRec* newFP);
167 void enterFP(ActRec* oldFP, ActRec* newFP);
168 void exitFP(ActRec* fp);
170 void set(const StringData* name, const TypedValue* tv);
171 void bind(const StringData* name, TypedValue* tv);
172 void setWithRef(const StringData* name, TypedValue* tv);
173 TypedValue* lookup(const StringData* name);
174 TypedValue* lookupAdd(const StringData* name);
175 bool unset(const StringData* name);
177 Array getDefinedVariables() const;
179 // Used for save/store m_cfp for debugger
180 ActRec* getFP() const { return m_nvTable.getFP(); }
181 bool isGlobalScope() const { return m_global; }
183 // Access to wrapped ExtraArgs, if we have one.
184 TypedValue* getExtraArg(unsigned argInd) const;
188 * An "ActRec" is a call activation record. The ordering of the fields assumes
189 * that stacks grow toward lower addresses.
191 * For most purposes, an ActRec can be considered to be in one of three
192 * possible states:
193 * Pre-live:
194 * After the FPush* instruction which materialized the ActRec on the stack
195 * but before the corresponding FCall instruction
196 * Live:
197 * After the corresponding FCall instruction but before the ActRec fields
198 * and locals/iters have been decref'd (either by return or unwinding)
199 * Post-live:
200 * After the ActRec fields and locals/iters have been decref'd
202 * Note that when a function is invoked by the runtime via invokeFunc(), the
203 * "pre-live" state is skipped and the ActRec is materialized in the "live"
204 * state.
206 struct ActRec {
207 union {
208 // This pair of uint64_t's must be the first two elements in the structure
209 // so that the pointer to the ActRec can also be used for RBP chaining.
210 // Note that ActRec's are also x64 frames, so this is an implicit machine
211 // dependency.
212 TypedValue _dummyA;
213 struct {
214 ActRec* m_sfp; // Previous hardware frame pointer/ActRec.
215 uint64_t m_savedRip; // In-TC address to return to.
218 union {
219 TypedValue _dummyB;
220 struct {
221 const Func* m_func; // Function.
222 uint32_t m_soff; // Saved offset of caller from beginning of
223 // caller's Func's bytecode.
225 // Bits 0-28 are the number of function args.
226 // Bit 29 is whether the locals were already decrefd (used by unwinder)
227 // Bit 30 is whether this ActRec is embedded in a Resumable object.
228 // Bit 31 is whether this ActRec came from FPushCtor*.
229 uint32_t m_numArgsAndFlags;
232 union {
233 TypedValue m_r; // Return value teleported here when the ActRec
234 // is post-live.
235 struct {
236 union {
237 ObjectData* m_this; // This.
238 Class* m_cls; // Late bound class.
240 union {
241 VarEnv* m_varEnv; // Variable environment; only used when the
242 // ActRec is live.
243 ExtraArgs* m_extraArgs; // Light-weight extra args; used only when the
244 // ActRec is live.
245 StringData* m_invName; // Invoked function name (used for __call);
246 // only used when ActRec is pre-live.
251 // Get the next outermost VM frame, but if this is
252 // a re-entry frame, return nullptr
253 ActRec* sfp() const;
255 void setReturn(ActRec* fp, PC pc, void* retAddr);
256 void setReturnVMExit();
258 // skip this frame if it is for a builtin function
259 bool skipFrame() const;
262 * Accessors for the packed m_numArgsAndFlags field. We track
263 * whether ActRecs came from FPushCtor* so that during unwinding we
264 * can set the flag not to call destructors for objects whose
265 * constructors exit via an exception.
268 static constexpr int kNumArgsBits = 29;
269 static constexpr int kNumArgsMask = (1 << kNumArgsBits) - 1;
270 static constexpr int kFlagsMask = ~kNumArgsMask;
272 static constexpr int kLocalsDecRefdShift = kNumArgsBits;
273 static constexpr int kResumedShift = kNumArgsBits + 1;
274 static constexpr int kFPushCtorShift = kNumArgsBits + 2;
276 static_assert(kFPushCtorShift <= 8 * sizeof(int32_t) - 1,
277 "Out of bits in ActRec");
279 static constexpr int kLocalsDecRefdMask = 1 << kLocalsDecRefdShift;
280 static constexpr int kResumedMask = 1 << kResumedShift;
281 static constexpr int kFPushCtorMask = 1 << kFPushCtorShift;
283 int32_t numArgs() const {
284 return m_numArgsAndFlags & kNumArgsMask;
287 bool localsDecRefd() const {
288 return m_numArgsAndFlags & kLocalsDecRefdMask;
291 bool resumed() const {
292 return m_numArgsAndFlags & kResumedMask;
295 void setResumed() {
296 m_numArgsAndFlags |= kResumedMask;
299 bool isFromFPushCtor() const {
300 return m_numArgsAndFlags & kFPushCtorMask;
303 static inline uint32_t
304 encodeNumArgs(uint32_t numArgs, bool localsDecRefd, bool resumed,
305 bool isFPushCtor) {
306 assert((numArgs & kFlagsMask) == 0);
307 return numArgs |
308 (localsDecRefd << kLocalsDecRefdShift) |
309 (resumed << kResumedShift) |
310 (isFPushCtor << kFPushCtorShift);
313 void initNumArgs(uint32_t numArgs) {
314 m_numArgsAndFlags = encodeNumArgs(numArgs, false, false, false);
317 void initNumArgsFromResumable(uint32_t numArgs) {
318 m_numArgsAndFlags = encodeNumArgs(numArgs, false, true, false);
321 void initNumArgsFromFPushCtor(uint32_t numArgs) {
322 m_numArgsAndFlags = encodeNumArgs(numArgs, false, false, true);
325 void setNumArgs(uint32_t numArgs) {
326 m_numArgsAndFlags = encodeNumArgs(numArgs, localsDecRefd(), resumed(),
327 isFromFPushCtor());
330 void setLocalsDecRefd() {
331 assert(!localsDecRefd());
332 m_numArgsAndFlags |= kLocalsDecRefdMask;
335 static void* encodeThis(ObjectData* obj, Class* cls) {
336 if (obj) return obj;
337 if (cls) return (char*)cls + 1;
338 not_reached();
341 static void* encodeThis(ObjectData* obj) { return obj; }
342 static void* encodeClass(const Class* cls) {
343 return cls ? (char*)cls + 1 : nullptr;
345 static ObjectData* decodeThis(void* p) {
346 return (uintptr_t(p) & 1) ? nullptr : (ObjectData*)p;
348 static Class* decodeClass(void* p) {
349 return (uintptr_t(p) & 1) ? (Class*)(uintptr_t(p)&~1LL) : nullptr;
352 void setThisOrClass(void* objOrCls) {
353 setThisOrClassAllowNull(objOrCls);
354 assert(hasThis() || hasClass());
356 void setThisOrClassAllowNull(void* objOrCls) {
357 m_this = (ObjectData*)objOrCls;
360 void* getThisOrClass() const {
361 return m_this;
364 const Unit* unit() const {
365 func()->validate();
366 return func()->unit();
369 const Func* func() const {
370 return m_func;
374 * To conserve space, we use unions for pairs of mutually exclusive
375 * fields (fields that are not used at the same time). We use unions
376 * for m_this/m_cls and m_varEnv/m_invName.
378 * The least significant bit is used as a marker for each pair of fields
379 * so that we can distinguish at runtime which field is valid. We define
380 * accessors below to encapsulate this logic.
382 * Note that m_invName is only used when the ActRec is pre-live. Thus when
383 * an ActRec is live it is safe to directly access m_varEnv without using
384 * accessors.
387 static constexpr int8_t kHasClassBit = 0x1;
388 static constexpr int8_t kClassMask = ~kHasClassBit;
390 inline bool hasThis() const {
391 return m_this && !(reinterpret_cast<intptr_t>(m_this) & kHasClassBit);
393 inline ObjectData* getThis() const {
394 assert(hasThis());
395 return m_this;
397 inline void setThis(ObjectData* val) {
398 m_this = val;
400 inline bool hasClass() const {
401 return reinterpret_cast<intptr_t>(m_cls) & kHasClassBit;
403 inline Class* getClass() const {
404 assert(hasClass());
405 return reinterpret_cast<Class*>(
406 reinterpret_cast<intptr_t>(m_cls) & kClassMask);
408 inline void setClass(Class* val) {
409 m_cls = reinterpret_cast<Class*>(
410 reinterpret_cast<intptr_t>(val) | kHasClassBit);
413 // Note that reordering these is likely to require changes to the translator.
414 static constexpr int8_t kInvNameBit = 0x1;
415 static constexpr int8_t kInvNameMask = ~kInvNameBit;
416 static constexpr int8_t kExtraArgsBit = 0x2;
417 static constexpr int8_t kExtraArgsMask = ~kExtraArgsBit;
419 inline bool hasVarEnv() const {
420 return m_varEnv &&
421 !(reinterpret_cast<intptr_t>(m_varEnv) & (kInvNameBit | kExtraArgsBit));
423 inline bool hasInvName() const {
424 return reinterpret_cast<intptr_t>(m_invName) & kInvNameBit;
426 inline bool hasExtraArgs() const {
427 return reinterpret_cast<intptr_t>(m_extraArgs) & kExtraArgsBit;
429 inline VarEnv* getVarEnv() const {
430 assert(hasVarEnv());
431 return m_varEnv;
433 inline StringData* getInvName() const {
434 assert(hasInvName());
435 return reinterpret_cast<StringData*>(
436 reinterpret_cast<intptr_t>(m_invName) & kInvNameMask);
438 inline ExtraArgs* getExtraArgs() const {
439 return reinterpret_cast<ExtraArgs*>(
440 reinterpret_cast<intptr_t>(m_extraArgs) & kExtraArgsMask);
442 inline void setVarEnv(VarEnv* val) {
443 m_varEnv = val;
445 inline void setInvName(StringData* val) {
446 m_invName = reinterpret_cast<StringData*>(
447 reinterpret_cast<intptr_t>(val) | kInvNameBit);
449 inline void setExtraArgs(ExtraArgs* val) {
450 m_extraArgs = reinterpret_cast<ExtraArgs*>(
451 reinterpret_cast<intptr_t>(val) | kExtraArgsBit);
454 // Accessors for extra arg queries.
455 TypedValue* getExtraArg(unsigned ind) const {
456 assert(hasExtraArgs() || hasVarEnv());
457 return hasExtraArgs() ? getExtraArgs()->getExtraArg(ind) :
458 hasVarEnv() ? getVarEnv()->getExtraArg(ind) :
459 static_cast<TypedValue*>(0);
463 static_assert(offsetof(ActRec, m_sfp) == 0,
464 "m_sfp should be at offset 0 of ActRec");
466 inline int32_t arOffset(const ActRec* ar, const ActRec* other) {
467 return (intptr_t(other) - intptr_t(ar)) / sizeof(TypedValue);
470 inline ActRec* arAtOffset(const ActRec* ar, int32_t offset) {
471 return (ActRec*)(intptr_t(ar) + intptr_t(offset * sizeof(TypedValue)));
474 inline ActRec* arFromSpOffset(const ActRec *sp, int32_t offset) {
475 return arAtOffset(sp, offset);
478 inline TypedValue* arReturn(ActRec* ar, Variant&& value) {
479 ar->m_r = *value.asTypedValue();
480 tvWriteNull(value.asTypedValue());
481 return &ar->m_r;
484 template <bool crossBuiltin> Class* arGetContextClassImpl(const ActRec* ar);
485 template <> Class* arGetContextClassImpl<true>(const ActRec* ar);
486 template <> Class* arGetContextClassImpl<false>(const ActRec* ar);
487 inline Class* arGetContextClass(const ActRec* ar) {
488 return arGetContextClassImpl<false>(ar);
490 inline Class* arGetContextClassFromBuiltin(const ActRec* ar) {
491 return arGetContextClassImpl<true>(ar);
494 // Used by extension functions that take a PHP "callback", since they need to
495 // figure out the callback context once and call it multiple times. (e.g.
496 // array_map, array_filter, ...)
497 struct CallCtx {
498 const Func* func;
499 ObjectData* this_;
500 Class* cls;
501 StringData* invName;
504 constexpr size_t kNumIterCells = sizeof(Iter) / sizeof(Cell);
505 constexpr size_t kNumActRecCells = sizeof(ActRec) / sizeof(Cell);
508 * We pad all stack overflow checks by a small amount to allow for three
509 * things:
511 * - inlining functions without having to either do another stack
512 * check (or chase down prologues to smash checks to be bigger).
514 * - omitting stack overflow checks on leaf functions
516 * - delaying stack overflow checks on reentry
518 constexpr int kStackCheckLeafPadding = 20;
519 constexpr int kStackCheckReenterPadding = 9;
520 constexpr int kStackCheckPadding = kStackCheckLeafPadding +
521 kStackCheckReenterPadding;
523 constexpr int kInvalidRaiseLevel = -1;
524 constexpr int kInvalidNesting = -1;
526 struct Fault {
527 enum class Type : int16_t {
528 UserException,
529 CppException
532 explicit Fault()
533 : m_raiseNesting(kInvalidNesting),
534 m_raiseFrame(nullptr),
535 m_raiseOffset(kInvalidOffset),
536 m_handledCount(0) {}
538 union {
539 ObjectData* m_userException;
540 Exception* m_cppException;
542 Type m_faultType;
544 // The VM nesting at the moment where the exception was thrown.
545 int m_raiseNesting;
546 // The frame where the exception was thrown.
547 ActRec* m_raiseFrame;
548 // The offset within the frame where the exception was thrown.
549 // This value is updated when a fault is updated when exception
550 // chaining takes place. In this case the raise offset of the newly
551 // thrown exception is set to the offset of the previously thrown
552 // exception. The offset is also updated when the exception
553 // propagates outside its current frame.
554 Offset m_raiseOffset;
555 // The number of EHs that were already examined for this exception.
556 // This is used to ensure that the same exception handler is not
557 // run twice for the same exception. The unwinder may be entered
558 // multiple times for the same fault as a result of calling Unwind.
559 // The field is used to skip through the EHs that were already run.
560 int m_handledCount;
563 // Interpreter evaluation stack.
564 class Stack {
565 TypedValue* m_elms;
566 TypedValue* m_top;
567 TypedValue* m_base; // Stack grows down, so m_base is beyond the end of
568 // m_elms.
570 public:
571 void* getStackLowAddress() const { return m_elms; }
572 void* getStackHighAddress() const { return m_base; }
573 bool isValidAddress(uintptr_t v) {
574 return v >= uintptr_t(m_elms) && v < uintptr_t(m_base);
576 void requestInit();
577 void requestExit();
579 static const int sSurprisePageSize;
580 static const unsigned sMinStackElms;
581 static void ValidateStackSize();
582 Stack();
583 ~Stack();
585 std::string toString(const ActRec* fp, int offset,
586 std::string prefix="") const;
588 bool wouldOverflow(int numCells) const;
591 * top --
592 * topOfStackOffset --
594 * Accessors for the x64 translator. Do not play on or around.
596 TypedValue*& top() {
597 return m_top;
600 static constexpr size_t topOfStackOffset() {
601 return offsetof(Stack, m_top);
604 static TypedValue* frameStackBase(const ActRec* fp);
605 static TypedValue* resumableStackBase(const ActRec* fp);
607 ALWAYS_INLINE
608 size_t count() const {
609 return ((uintptr_t)m_base - (uintptr_t)m_top) / sizeof(TypedValue);
612 // Same as discard(), but meant to replace popC() iff the interpreter knows
613 // for certain that decrementing a refcount is unnecessary.
614 ALWAYS_INLINE
615 void popX() {
616 assert(m_top != m_base);
617 assert(!IS_REFCOUNTED_TYPE(m_top->m_type));
618 tvDebugTrash(m_top);
619 m_top++;
622 ALWAYS_INLINE
623 void popC() {
624 assert(m_top != m_base);
625 assert(cellIsPlausible(*m_top));
626 tvRefcountedDecRefCell(m_top);
627 tvDebugTrash(m_top);
628 m_top++;
631 ALWAYS_INLINE
632 void popA() {
633 assert(m_top != m_base);
634 assert(m_top->m_type == KindOfClass);
635 tvDebugTrash(m_top);
636 m_top++;
639 ALWAYS_INLINE
640 void popV() {
641 assert(m_top != m_base);
642 assert(refIsPlausible(*m_top));
643 tvDecRefRef(m_top);
644 tvDebugTrash(m_top);
645 m_top++;
648 ALWAYS_INLINE
649 void popTV() {
650 assert(m_top != m_base);
651 assert(m_top->m_type == KindOfClass || tvIsPlausible(*m_top));
652 tvRefcountedDecRef(m_top);
653 tvDebugTrash(m_top);
654 m_top++;
657 // popAR() should only be used to tear down a pre-live ActRec. Once
658 // an ActRec is live, it should be torn down using frame_free_locals()
659 // followed by discardAR() or ret().
660 ALWAYS_INLINE
661 void popAR() {
662 assert(m_top != m_base);
663 ActRec* ar = (ActRec*)m_top;
664 if (ar->hasThis()) decRefObj(ar->getThis());
665 if (ar->hasInvName()) decRefStr(ar->getInvName());
667 // This should only be used on a pre-live ActRec.
668 assert(!ar->hasVarEnv());
669 assert(!ar->hasExtraArgs());
670 discardAR();
673 ALWAYS_INLINE
674 void discardAR() {
675 assert(m_top != m_base);
676 if (debug) {
677 for (int i = 0; i < kNumActRecCells; ++i) {
678 tvDebugTrash(m_top + i);
681 m_top += kNumActRecCells;
682 assert((uintptr_t)m_top <= (uintptr_t)m_base);
685 ALWAYS_INLINE
686 void ret() {
687 // Leave part of the activation on the stack, since the return value now
688 // resides there.
689 if (debug) {
690 for (int i = 0; i < kNumActRecCells - 1; ++i) {
691 tvDebugTrash(m_top + i);
694 m_top += kNumActRecCells - 1;
695 assert((uintptr_t)m_top <= (uintptr_t)m_base);
698 ALWAYS_INLINE
699 void discard() {
700 assert(m_top != m_base);
701 tvDebugTrash(m_top);
702 m_top++;
705 ALWAYS_INLINE
706 void ndiscard(size_t n) {
707 assert((uintptr_t)&m_top[n] <= (uintptr_t)m_base);
708 if (debug) {
709 for (int i = 0; i < n; ++i) {
710 tvDebugTrash(m_top + i);
713 m_top += n;
716 ALWAYS_INLINE
717 void dup() {
718 assert(m_top != m_base);
719 assert(m_top != m_elms);
720 assert(m_top->m_type != KindOfRef);
721 Cell* fr = m_top;
722 m_top--;
723 Cell* to = m_top;
724 cellDup(*fr, *to);
727 ALWAYS_INLINE
728 void box() {
729 assert(m_top != m_base);
730 assert(m_top->m_type != KindOfRef);
731 tvBox(m_top);
734 ALWAYS_INLINE
735 void unbox() {
736 assert(m_top != m_base);
737 tvUnbox(m_top);
740 ALWAYS_INLINE
741 void pushUninit() {
742 assert(m_top != m_elms);
743 m_top--;
744 tvWriteUninit(m_top);
747 ALWAYS_INLINE
748 void pushNull() {
749 assert(m_top != m_elms);
750 m_top--;
751 tvWriteNull(m_top);
754 ALWAYS_INLINE
755 void pushNullUninit() {
756 assert(m_top != m_elms);
757 m_top--;
758 m_top->m_data.num = 0;
759 m_top->m_type = KindOfUninit;
762 #define PUSH_METHOD(name, type, field, value) \
763 ALWAYS_INLINE void push##name() { \
764 assert(m_top != m_elms); \
765 m_top--; \
766 m_top->m_data.field = value; \
767 m_top->m_type = type; \
769 PUSH_METHOD(True, KindOfBoolean, num, 1)
770 PUSH_METHOD(False, KindOfBoolean, num, 0)
772 #define PUSH_METHOD_ARG(name, type, field, argtype, arg) \
773 ALWAYS_INLINE void push##name(argtype arg) { \
774 assert(m_top != m_elms); \
775 m_top--; \
776 m_top->m_data.field = arg; \
777 m_top->m_type = type; \
779 PUSH_METHOD_ARG(Bool, KindOfBoolean, num, bool, b)
780 PUSH_METHOD_ARG(Int, KindOfInt64, num, int64_t, i)
781 PUSH_METHOD_ARG(Double, KindOfDouble, dbl, double, d)
783 // This should only be called directly when the caller has
784 // already adjusted the refcount appropriately
785 ALWAYS_INLINE
786 void pushStringNoRc(StringData* s) {
787 assert(m_top != m_elms);
788 m_top--;
789 m_top->m_data.pstr = s;
790 m_top->m_type = KindOfString;
793 ALWAYS_INLINE
794 void pushStaticString(StringData* s) {
795 assert(s->isStatic()); // No need to call s->incRefCount().
796 assert(m_top != m_elms);
797 m_top--;
798 m_top->m_data.pstr = s;
799 m_top->m_type = KindOfStaticString;
802 // This should only be called directly when the caller has
803 // already adjusted the refcount appropriately
804 ALWAYS_INLINE
805 void pushArrayNoRc(ArrayData* a) {
806 assert(m_top != m_elms);
807 m_top--;
808 m_top->m_data.parr = a;
809 m_top->m_type = KindOfArray;
812 ALWAYS_INLINE
813 void pushArray(ArrayData* a) {
814 assert(a);
815 pushArrayNoRc(a);
816 a->incRefCount();
819 ALWAYS_INLINE
820 void pushStaticArray(ArrayData* a) {
821 assert(a->isStatic()); // No need to call a->incRefCount().
822 pushArrayNoRc(a);
825 // This should only be called directly when the caller has
826 // already adjusted the refcount appropriately
827 ALWAYS_INLINE
828 void pushObjectNoRc(ObjectData* o) {
829 assert(m_top != m_elms);
830 m_top--;
831 m_top->m_data.pobj = o;
832 m_top->m_type = KindOfObject;
835 ALWAYS_INLINE
836 void pushObject(ObjectData* o) {
837 pushObjectNoRc(o);
838 o->incRefCount();
841 ALWAYS_INLINE
842 void nalloc(size_t n) {
843 assert((uintptr_t)&m_top[-n] <= (uintptr_t)m_base);
844 m_top -= n;
847 ALWAYS_INLINE
848 Cell* allocC() {
849 assert(m_top != m_elms);
850 m_top--;
851 return (Cell*)m_top;
854 ALWAYS_INLINE
855 Ref* allocV() {
856 assert(m_top != m_elms);
857 m_top--;
858 return (Ref*)m_top;
861 ALWAYS_INLINE
862 TypedValue* allocTV() {
863 assert(m_top != m_elms);
864 m_top--;
865 return m_top;
868 ALWAYS_INLINE
869 ActRec* allocA() {
870 assert((uintptr_t)&m_top[-kNumActRecCells] >= (uintptr_t)m_elms);
871 assert(kNumActRecCells * sizeof(Cell) == sizeof(ActRec));
872 m_top -= kNumActRecCells;
873 return (ActRec*)m_top;
876 ALWAYS_INLINE
877 void allocI() {
878 assert(kNumIterCells * sizeof(Cell) == sizeof(Iter));
879 assert((uintptr_t)&m_top[-kNumIterCells] >= (uintptr_t)m_elms);
880 m_top -= kNumIterCells;
883 ALWAYS_INLINE
884 void replaceC(const Cell& c) {
885 assert(m_top != m_base);
886 assert(m_top->m_type != KindOfRef);
887 tvRefcountedDecRefCell(m_top);
888 *m_top = c;
891 template <DataType DT>
892 ALWAYS_INLINE
893 void replaceC() {
894 assert(m_top != m_base);
895 assert(m_top->m_type != KindOfRef);
896 tvRefcountedDecRefCell(m_top);
897 *m_top = make_tv<DT>();
900 template <DataType DT, typename T>
901 ALWAYS_INLINE
902 void replaceC(T value) {
903 assert(m_top != m_base);
904 assert(m_top->m_type != KindOfRef);
905 tvRefcountedDecRefCell(m_top);
906 *m_top = make_tv<DT>(value);
909 ALWAYS_INLINE
910 void replaceTV(const TypedValue& tv) {
911 assert(m_top != m_base);
912 tvRefcountedDecRef(m_top);
913 *m_top = tv;
916 template <DataType DT>
917 ALWAYS_INLINE
918 void replaceTV() {
919 assert(m_top != m_base);
920 tvRefcountedDecRef(m_top);
921 *m_top = make_tv<DT>();
924 template <DataType DT, typename T>
925 ALWAYS_INLINE
926 void replaceTV(T value) {
927 assert(m_top != m_base);
928 tvRefcountedDecRef(m_top);
929 *m_top = make_tv<DT>(value);
932 ALWAYS_INLINE
933 Cell* topC() {
934 assert(m_top != m_base);
935 assert(m_top->m_type != KindOfRef);
936 return (Cell*)m_top;
939 ALWAYS_INLINE
940 Ref* topV() {
941 assert(m_top != m_base);
942 assert(m_top->m_type == KindOfRef);
943 return (Ref*)m_top;
946 ALWAYS_INLINE
947 const Class* topA() {
948 assert(m_top != m_base);
949 assert(m_top->m_type == KindOfClass);
950 return m_top->m_data.pcls;
953 ALWAYS_INLINE
954 TypedValue* topTV() {
955 assert(m_top != m_base);
956 return m_top;
959 ALWAYS_INLINE
960 Cell* indC(size_t ind) {
961 assert(m_top != m_base);
962 assert(m_top[ind].m_type != KindOfRef);
963 return (Cell*)(&m_top[ind]);
966 ALWAYS_INLINE
967 TypedValue* indTV(size_t ind) {
968 assert(m_top != m_base);
969 return &m_top[ind];
972 ALWAYS_INLINE
973 void pushClass(Class* clss) {
974 assert(m_top != m_elms);
975 m_top--;
976 m_top->m_data.pcls = clss;
977 m_top->m_type = KindOfClass;
981 //////////////////////////////////////////////////////////////////////
984 * Visit all the slots and pre-live ActRecs on a live eval stack,
985 * handling FPI regions and resumables correctly, and stopping when we
986 * reach the supplied activation record.
988 * The stack elements are visited from lower address to higher, with
989 * ActRecs visited after the stack slots below them.
991 * This will not read the VM registers (pc, fp, sp), so it will
992 * perform the requested visitation independent of modifications to
993 * the VM stack or frame pointer.
995 template<class MaybeConstTVPtr, class ARFun, class TVFun>
996 typename std::enable_if<
997 std::is_same<MaybeConstTVPtr,const TypedValue*>::value ||
998 std::is_same<MaybeConstTVPtr, TypedValue*>::value
999 >::type
1000 visitStackElems(const ActRec* const fp,
1001 MaybeConstTVPtr const stackTop,
1002 Offset const bcOffset,
1003 ARFun arFun,
1004 TVFun tvFun) {
1005 const TypedValue* const base =
1006 fp->resumed() ? Stack::resumableStackBase(fp)
1007 : Stack::frameStackBase(fp);
1008 MaybeConstTVPtr cursor = stackTop;
1009 assert(cursor <= base);
1011 if (auto fe = fp->m_func->findFPI(bcOffset)) {
1012 for (;;) {
1013 ActRec* ar;
1014 if (!fp->resumed()) {
1015 ar = arAtOffset(fp, -fe->m_fpOff);
1016 } else {
1017 // fp is pointing into the Resumable struct. Since fpOff is
1018 // given as an offset from the frame pointer as if it were in
1019 // the normal place on the main stack, we have to reconstruct
1020 // that "normal place".
1021 auto const fakePrevFP = reinterpret_cast<const ActRec*>(
1022 base + fp->m_func->numSlotsInFrame()
1024 ar = arAtOffset(fakePrevFP, -fe->m_fpOff);
1027 assert(cursor <= reinterpret_cast<TypedValue*>(ar));
1028 while (cursor < reinterpret_cast<TypedValue*>(ar)) {
1029 tvFun(cursor++);
1031 arFun(ar);
1033 cursor += kNumActRecCells;
1034 if (fe->m_parentIndex == -1) break;
1035 fe = &fp->m_func->fpitab()[fe->m_parentIndex];
1039 while (cursor < base) {
1040 tvFun(cursor++);
1044 ///////////////////////////////////////////////////////////////////////////////
1048 #endif