2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #ifndef incl_HPHP_VM_BYTECODE_H_
18 #define incl_HPHP_VM_BYTECODE_H_
20 #include "hphp/runtime/base/array-iterator.h"
21 #include "hphp/runtime/base/class-info.h"
22 #include "hphp/runtime/base/rds.h"
23 #include "hphp/runtime/base/rds-util.h"
24 #include "hphp/runtime/base/tv-arith.h"
25 #include "hphp/runtime/base/tv-conversions.h"
26 #include "hphp/runtime/base/tv-helpers.h"
28 #include "hphp/runtime/vm/class.h"
29 #include "hphp/runtime/vm/func.h"
30 #include "hphp/runtime/vm/name-value-table.h"
31 #include "hphp/runtime/vm/unit.h"
33 #include "hphp/util/arena.h"
35 #include <type_traits>
/*
 * These macros allow us to easily change the arguments to iop*() opcode
 * handler functions in one place instead of at every definition site.
 */
// Parameter list shared by every iop*() handler: the interpreter program
// counter, by reference so a handler can advance it.
#define IOP_ARGS PC& pc
// Forward the IOP_ARGS parameters when one handler delegates to another.
#define IOP_PASS_ARGS pc
#define IOP_PASS(pc) pc

// Suffix appended to a unit's file name when reporting code that came from
// eval()'d source.
#define EVAL_FILENAME_SUFFIX ") : eval()'d code"
50 void SETOP_BODY_CELL(Cell
* lhs
, SetOpOp op
, Cell
* rhs
) {
51 assert(cellIsPlausible(*lhs
));
52 assert(cellIsPlausible(*rhs
));
55 case SetOpOp::PlusEqual
: cellAddEq(*lhs
, *rhs
); return;
56 case SetOpOp::MinusEqual
: cellSubEq(*lhs
, *rhs
); return;
57 case SetOpOp::MulEqual
: cellMulEq(*lhs
, *rhs
); return;
58 case SetOpOp::DivEqual
: cellDivEq(*lhs
, *rhs
); return;
59 case SetOpOp::PowEqual
: cellPowEq(*lhs
, *rhs
); return;
60 case SetOpOp::ModEqual
: cellModEq(*lhs
, *rhs
); return;
61 case SetOpOp::ConcatEqual
:
62 concat_assign(tvAsVariant(lhs
), cellAsCVarRef(*rhs
).toString());
64 case SetOpOp::AndEqual
: cellBitAndEq(*lhs
, *rhs
); return;
65 case SetOpOp::OrEqual
: cellBitOrEq(*lhs
, *rhs
); return;
66 case SetOpOp::XorEqual
: cellBitXorEq(*lhs
, *rhs
); return;
67 case SetOpOp::SlEqual
: cellShlEq(*lhs
, *rhs
); return;
68 case SetOpOp::SrEqual
: cellShrEq(*lhs
, *rhs
); return;
69 case SetOpOp::PlusEqualO
: cellAddEqO(*lhs
, *rhs
); return;
70 case SetOpOp::MinusEqualO
: cellSubEqO(*lhs
, *rhs
); return;
71 case SetOpOp::MulEqualO
: cellMulEqO(*lhs
, *rhs
); return;
77 void SETOP_BODY(TypedValue
* lhs
, SetOpOp op
, Cell
* rhs
) {
78 SETOP_BODY_CELL(tvToCell(lhs
), op
, rhs
);
/*
 * ExtraArgs stores the arguments passed to a function beyond its declared
 * parameter count, as a refcount-owning flexible array of TypedValues.
 */
struct ExtraArgs : private boost::noncopyable {
  /*
   * Allocate an ExtraArgs structure, with arguments copied from the
   * evaluation stack. This takes ownership of the args without
   * adjusting reference counts, so they must be discarded from the
   * stack afterwards without decref'ing them.
   */
  static ExtraArgs* allocateCopy(TypedValue* args, unsigned nargs);

  /*
   * Allocate an ExtraArgs, without initializing any of the arguments.
   * All arguments must be initialized via getExtraArg before
   * deallocate() is called for the returned pointer.
   */
  static ExtraArgs* allocateUninit(unsigned nargs);

  /*
   * Deallocate an extraArgs structure. Either use the one that
   * exists in a ActRec, or do it explicitly.
   */
  static void deallocate(ActRec*);
  static void deallocate(ExtraArgs*, unsigned numArgs);

  /*
   * Make a copy of ExtraArgs.
   */
  ExtraArgs* clone(ActRec* fp) const;

  /*
   * Get the slot for extra arg i, where i = argNum - func->numParams.
   */
  TypedValue* getExtraArg(unsigned argInd) const;

  // Raw allocation helper sized for `nargs` extra TypedValues.
  // NOTE(review): presumably private in the full definition — the access
  // specifier lines are not visible in this view; confirm upstream.
  static void* allocMem(unsigned nargs);

  // Flexible array member holding the extra arguments themselves.
  TypedValue m_extraArgs[];
/*
 * Variable environment.
 *
 * A variable environment consists of the locals for the current function
 * (either pseudo-main, global function, or method), plus any variables that
 * are dynamically defined.
 *
 * Logically, a global function or method starts off with a variable
 * environment that contains only its locals, but a pseudo-main is handed
 * its caller's existing variable environment. Generally, however, we don't
 * create a variable environment for global functions or methods until it
 * actually needs one (i.e. if it is about to include a pseudo-main, or if
 * it uses dynamic variable lookups).
 *
 * Named locals always appear in the expected place on the stack, even after
 * a VarEnv is attached. Internally uses a NameValueTable to hook up names to
 * the local locations.
 */
  // Name-to-storage bindings for this environment.
  NameValueTable m_nvTable;
  // Overflow (beyond-declared-arity) arguments wrapped by this VarEnv.
  ExtraArgs* m_extraArgs;

  explicit VarEnv(ActRec* fp, ExtraArgs* eArgs);
  explicit VarEnv(const VarEnv* varEnv, ActRec* fp);

  // Allocates a local VarEnv and attaches it to the existing FP.
  static VarEnv* createLocal(ActRec* fp);

  // Allocate a global VarEnv. Initially not attached to any frame.
  static VarEnv* createGlobal();

  VarEnv* clone(ActRec* fp) const;

  // Frame-transition hooks: keep the NameValueTable's notion of the
  // current frame in sync when frames suspend, enter, or exit.
  void suspend(const ActRec* oldFP, ActRec* newFP);
  void enterFP(ActRec* oldFP, ActRec* newFP);
  void exitFP(ActRec* fp);

  // Define or update the named variable. NOTE(review): set() appears to
  // assign by value and bind() by reference, per the pointer constness —
  // confirm against the .cpp.
  void set(const StringData* name, const TypedValue* tv);
  void bind(const StringData* name, TypedValue* tv);
  void setWithRef(const StringData* name, TypedValue* tv);
  // Find a variable; lookupAdd creates it if absent.
  TypedValue* lookup(const StringData* name);
  TypedValue* lookupAdd(const StringData* name);
  bool unset(const StringData* name);

  Array getDefinedVariables() const;

  // Used for save/store m_cfp for debugger
  ActRec* getFP() const { return m_nvTable.getFP(); }
  bool isGlobalScope() const { return m_global; }

  // Access to wrapped ExtraArgs, if we have one.
  TypedValue* getExtraArg(unsigned argInd) const;
/*
 * An "ActRec" is a call activation record. The ordering of the fields assumes
 * that stacks grow toward lower addresses.
 *
 * For most purposes, an ActRec can be considered to be in one of three
 * states (labels hedged — the original list headers are lost in this view):
 *
 *   Pre-live:
 *     After the FPush* instruction which materialized the ActRec on the stack
 *     but before the corresponding FCall instruction
 *
 *   Live:
 *     After the corresponding FCall instruction but before the ActRec fields
 *     and locals/iters have been decref'd (either by return or unwinding)
 *
 *   Post-live:
 *     After the ActRec fields and locals/iters have been decref'd
 *
 * Note that when a function is invoked by the runtime via invokeFunc(), the
 * "pre-live" state is skipped and the ActRec is materialized in the "live"
 * state.
 */
  // This pair of uint64_t's must be the first two elements in the structure
  // so that the pointer to the ActRec can also be used for RBP chaining.
  // Note that ActRec's are also x64 frames, so this is an implicit machine
  // dependency.
  ActRec* m_sfp;        // Previous hardware frame pointer/ActRec.
  uint64_t m_savedRip;  // In-TC address to return to.
  const Func* m_func;   // Function.
  uint32_t m_soff;      // Saved offset of caller from beginning of
                        // caller's Func's bytecode.

  // Bits 0-28 are the number of function args.
  // Bit 29 is whether the locals were already decrefd (used by unwinder)
  // Bit 30 is whether this ActRec is embedded in a Resumable object.
  // Bit 31 is whether this ActRec came from FPushCtor*.
  uint32_t m_numArgsAndFlags;

  TypedValue m_r;          // Return value teleported here when the ActRec
                           // is no longer needed (see arReturn()).
  ObjectData* m_this;      // This.
  Class* m_cls;            // Late bound class.
  VarEnv* m_varEnv;        // Variable environment; only used when the
                           // frame needs dynamic variable access.
  ExtraArgs* m_extraArgs;  // Light-weight extra args; used only when the
                           // frame received more args than declared.
  StringData* m_invName;   // Invoked function name (used for __call);
                           // only used when ActRec is pre-live.

  // Get the next outermost VM frame, but if this is
  // a re-entry frame, return nullptr
  // NOTE(review): the declaration this comment documented is lost here.

  // Record where execution resumes in the caller after this frame returns.
  void setReturn(ActRec* fp, PC pc, void* retAddr);
  void setReturnVMExit();

  // skip this frame if it is for a builtin function
  bool skipFrame() const;

  /*
   * Accessors for the packed m_numArgsAndFlags field. We track
   * whether ActRecs came from FPushCtor* so that during unwinding we
   * can set the flag not to call destructors for objects whose
   * constructors exit via an exception.
   */
  static constexpr int kNumArgsBits = 29;
  static constexpr int kNumArgsMask = (1 << kNumArgsBits) - 1;
  static constexpr int kFlagsMask = ~kNumArgsMask;
  static constexpr int kLocalsDecRefdShift = kNumArgsBits;
  static constexpr int kResumedShift = kNumArgsBits + 1;
  static constexpr int kFPushCtorShift = kNumArgsBits + 2;

  // All flag bits must fit in the 32-bit field alongside the arg count.
  static_assert(kFPushCtorShift <= 8 * sizeof(int32_t) - 1,
                "Out of bits in ActRec");

  static constexpr int kLocalsDecRefdMask = 1 << kLocalsDecRefdShift;
  static constexpr int kResumedMask = 1 << kResumedShift;
  static constexpr int kFPushCtorMask = 1 << kFPushCtorShift;

  int32_t numArgs() const {
    return m_numArgsAndFlags & kNumArgsMask;

  bool localsDecRefd() const {
    return m_numArgsAndFlags & kLocalsDecRefdMask;

  bool resumed() const {
    return m_numArgsAndFlags & kResumedMask;

  // (setResumed body fragment — signature lost from this view)
    m_numArgsAndFlags |= kResumedMask;

  bool isFromFPushCtor() const {
    return m_numArgsAndFlags & kFPushCtorMask;

  // Pack an arg count and the three flag bits into one uint32_t.
  static inline uint32_t
  encodeNumArgs(uint32_t numArgs, bool localsDecRefd, bool resumed,
    // numArgs must not already intrude into the flag bits.
    assert((numArgs & kFlagsMask) == 0);
           (localsDecRefd << kLocalsDecRefdShift) |
           (resumed << kResumedShift) |
           (isFPushCtor << kFPushCtorShift);

  void initNumArgs(uint32_t numArgs) {
    m_numArgsAndFlags = encodeNumArgs(numArgs, false, false, false);

  void initNumArgsFromResumable(uint32_t numArgs) {
    m_numArgsAndFlags = encodeNumArgs(numArgs, false, true, false);

  void initNumArgsFromFPushCtor(uint32_t numArgs) {
    m_numArgsAndFlags = encodeNumArgs(numArgs, false, false, true);

  // Change only the arg count, preserving the current flags.
  void setNumArgs(uint32_t numArgs) {
    m_numArgsAndFlags = encodeNumArgs(numArgs, localsDecRefd(), resumed(),

  void setLocalsDecRefd() {
    assert(!localsDecRefd());
    m_numArgsAndFlags |= kLocalsDecRefdMask;

  // Encode an object/class pair into the single this-or-class slot.
  // A Class* is tagged by setting its low bit (see kHasClassBit below).
  static void* encodeThis(ObjectData* obj, Class* cls) {
    if (cls) return (char*)cls + 1;

  static void* encodeThis(ObjectData* obj) { return obj; }
  static void* encodeClass(const Class* cls) {
    return cls ? (char*)cls + 1 : nullptr;

  static ObjectData* decodeThis(void* p) {
    return (uintptr_t(p) & 1) ? nullptr : (ObjectData*)p;

  static Class* decodeClass(void* p) {
    return (uintptr_t(p) & 1) ? (Class*)(uintptr_t(p)&~1LL) : nullptr;

  void setThisOrClass(void* objOrCls) {
    setThisOrClassAllowNull(objOrCls);
    assert(hasThis() || hasClass());

  void setThisOrClassAllowNull(void* objOrCls) {
    // m_this and m_cls share storage; writing either sets the slot.
    m_this = (ObjectData*)objOrCls;

  void* getThisOrClass() const {

  const Unit* unit() const {
    return func()->unit();

  const Func* func() const {

  /*
   * To conserve space, we use unions for pairs of mutually exclusive
   * fields (fields that are not used at the same time). We use unions
   * for m_this/m_cls and m_varEnv/m_invName.
   *
   * The least significant bit is used as a marker for each pair of fields
   * so that we can distinguish at runtime which field is valid. We define
   * accessors below to encapsulate this logic.
   *
   * Note that m_invName is only used when the ActRec is pre-live. Thus when
   * an ActRec is live it is safe to directly access m_varEnv without using
   * the accessors.
   */
  static constexpr int8_t kHasClassBit = 0x1;
  static constexpr int8_t kClassMask = ~kHasClassBit;

  inline bool hasThis() const {
    return m_this && !(reinterpret_cast<intptr_t>(m_this) & kHasClassBit);

  inline ObjectData* getThis() const {

  inline void setThis(ObjectData* val) {

  inline bool hasClass() const {
    return reinterpret_cast<intptr_t>(m_cls) & kHasClassBit;

  inline Class* getClass() const {
    // Strip the tag bit to recover the real pointer.
    return reinterpret_cast<Class*>(
      reinterpret_cast<intptr_t>(m_cls) & kClassMask);

  inline void setClass(Class* val) {
    m_cls = reinterpret_cast<Class*>(
      reinterpret_cast<intptr_t>(val) | kHasClassBit);

  // Note that reordering these is likely to require changes to the translator.
  static constexpr int8_t kInvNameBit = 0x1;
  static constexpr int8_t kInvNameMask = ~kInvNameBit;
  static constexpr int8_t kExtraArgsBit = 0x2;
  static constexpr int8_t kExtraArgsMask = ~kExtraArgsBit;

  inline bool hasVarEnv() const {
    // A VarEnv is the untagged state of the m_varEnv/m_invName/m_extraArgs
    // slot: neither tag bit may be set.
    !(reinterpret_cast<intptr_t>(m_varEnv) & (kInvNameBit | kExtraArgsBit));

  inline bool hasInvName() const {
    return reinterpret_cast<intptr_t>(m_invName) & kInvNameBit;

  inline bool hasExtraArgs() const {
    return reinterpret_cast<intptr_t>(m_extraArgs) & kExtraArgsBit;

  inline VarEnv* getVarEnv() const {

  inline StringData* getInvName() const {
    assert(hasInvName());
    return reinterpret_cast<StringData*>(
      reinterpret_cast<intptr_t>(m_invName) & kInvNameMask);

  inline ExtraArgs* getExtraArgs() const {
    return reinterpret_cast<ExtraArgs*>(
      reinterpret_cast<intptr_t>(m_extraArgs) & kExtraArgsMask);

  inline void setVarEnv(VarEnv* val) {

  inline void setInvName(StringData* val) {
    m_invName = reinterpret_cast<StringData*>(
      reinterpret_cast<intptr_t>(val) | kInvNameBit);

  inline void setExtraArgs(ExtraArgs* val) {
    m_extraArgs = reinterpret_cast<ExtraArgs*>(
      reinterpret_cast<intptr_t>(val) | kExtraArgsBit);

  // Accessors for extra arg queries.
  TypedValue* getExtraArg(unsigned ind) const {
    assert(hasExtraArgs() || hasVarEnv());
    return hasExtraArgs() ? getExtraArgs()->getExtraArg(ind) :
           hasVarEnv()    ? getVarEnv()->getExtraArg(ind) :
           static_cast<TypedValue*>(0);
// m_sfp must stay at offset 0 so that an ActRec* can double as a saved-RBP
// chain entry (see the RBP-chaining comment on the field itself).
static_assert(offsetof(ActRec, m_sfp) == 0,
              "m_sfp should be at offset 0 of ActRec");
466 inline int32_t arOffset(const ActRec
* ar
, const ActRec
* other
) {
467 return (intptr_t(other
) - intptr_t(ar
)) / sizeof(TypedValue
);
470 inline ActRec
* arAtOffset(const ActRec
* ar
, int32_t offset
) {
471 return (ActRec
*)(intptr_t(ar
) + intptr_t(offset
* sizeof(TypedValue
)));
474 inline ActRec
* arFromSpOffset(const ActRec
*sp
, int32_t offset
) {
475 return arAtOffset(sp
, offset
);
478 inline TypedValue
* arReturn(ActRec
* ar
, Variant
&& value
) {
479 ar
->m_r
= *value
.asTypedValue();
480 tvWriteNull(value
.asTypedValue());
484 template <bool crossBuiltin
> Class
* arGetContextClassImpl(const ActRec
* ar
);
485 template <> Class
* arGetContextClassImpl
<true>(const ActRec
* ar
);
486 template <> Class
* arGetContextClassImpl
<false>(const ActRec
* ar
);
487 inline Class
* arGetContextClass(const ActRec
* ar
) {
488 return arGetContextClassImpl
<false>(ar
);
490 inline Class
* arGetContextClassFromBuiltin(const ActRec
* ar
) {
491 return arGetContextClassImpl
<true>(ar
);
// Used by extension functions that take a PHP "callback", since they need to
// figure out the callback context once and call it multiple times. (e.g.
// array_map, array_filter, ...)
// NOTE(review): the declaration this comment documented is lost from this
// view of the file.

// Sizes of an Iter and an ActRec, measured in eval-stack cells.
constexpr size_t kNumIterCells = sizeof(Iter) / sizeof(Cell);
constexpr size_t kNumActRecCells = sizeof(ActRec) / sizeof(Cell);

/*
 * We pad all stack overflow checks by a small amount to allow for three
 * things:
 *
 *   - inlining functions without having to either do another stack
 *     check (or chase down prologues to smash checks to be bigger).
 *
 *   - omitting stack overflow checks on leaf functions
 *
 *   - delaying stack overflow checks on reentry
 */
constexpr int kStackCheckLeafPadding = 20;
constexpr int kStackCheckReenterPadding = 9;
constexpr int kStackCheckPadding = kStackCheckLeafPadding +
  kStackCheckReenterPadding;

// Sentinels meaning "no raise level / no nesting recorded" (see Fault below).
constexpr int kInvalidRaiseLevel = -1;
constexpr int kInvalidNesting = -1;
  // Discriminates the two kinds of in-flight exceptions the unwinder
  // tracks. (Enumerators are lost from this view of the file.)
  enum class Type : int16_t {

  // (Constructor initializer-list fragment — the constructor's name and
  // opening line are lost from this view.)
    : m_raiseNesting(kInvalidNesting),
      m_raiseFrame(nullptr),
      m_raiseOffset(kInvalidOffset),

  // Exactly one of these is meaningful, selected by the Type above:
  // a PHP-level exception object, or a native C++ exception.
  ObjectData* m_userException;
  Exception* m_cppException;

  // The VM nesting at the moment where the exception was thrown.

  // The frame where the exception was thrown.
  ActRec* m_raiseFrame;

  // The offset within the frame where the exception was thrown.
  // This value is updated when exception chaining takes place: the raise
  // offset of the newly thrown exception is set to the offset of the
  // previously thrown exception. The offset is also updated when the
  // exception propagates outside its current frame.
  Offset m_raiseOffset;

  // The number of EHs that were already examined for this exception.
  // This is used to ensure that the same exception handler is not
  // run twice for the same exception. The unwinder may be entered
  // multiple times for the same fault as a result of calling Unwind.
  // The field is used to skip through the EHs that were already run.
  // Interpreter evaluation stack.
  TypedValue* m_base; // Stack grows down, so m_base is beyond the end of
                      // the element storage (the high-address end).

  void* getStackLowAddress() const { return m_elms; }
  void* getStackHighAddress() const { return m_base; }

  // True iff v points into this stack's element storage.
  bool isValidAddress(uintptr_t v) {
    return v >= uintptr_t(m_elms) && v < uintptr_t(m_base);

  static const int sSurprisePageSize;
  static const unsigned sMinStackElms;
  static void ValidateStackSize();

  // Debug dump of the stack as seen from frame `fp` at bytecode `offset`.
  std::string toString(const ActRec* fp, int offset,
                       std::string prefix="") const;

  // Would pushing `numCells` more cells overflow the stack?
  bool wouldOverflow(int numCells) const;

  /*
   * topOfStackOffset --
   *
   *   Accessors for the x64 translator. Do not play on or around.
   */
  static constexpr size_t topOfStackOffset() {
    return offsetof(Stack, m_top);

  static TypedValue* frameStackBase(const ActRec* fp);
  static TypedValue* resumableStackBase(const ActRec* fp);

  // Number of cells currently on the stack.
  size_t count() const {
    return ((uintptr_t)m_base - (uintptr_t)m_top) / sizeof(TypedValue);

  // Same as discard(), but meant to replace popC() iff the interpreter knows
  // for certain that decrementing a refcount is unnecessary.
    assert(m_top != m_base);
    assert(!IS_REFCOUNTED_TYPE(m_top->m_type));

  // (popC fragment: pop a Cell, decref'ing it.)
    assert(m_top != m_base);
    assert(cellIsPlausible(*m_top));
    tvRefcountedDecRefCell(m_top);

  // (popA fragment: pop a Class — classes are not refcounted.)
    assert(m_top != m_base);
    assert(m_top->m_type == KindOfClass);

  // (popV fragment: pop a Ref.)
    assert(m_top != m_base);
    assert(refIsPlausible(*m_top));

  // (popTV fragment: pop any TypedValue, decref'ing if refcounted.)
    assert(m_top != m_base);
    assert(m_top->m_type == KindOfClass || tvIsPlausible(*m_top));
    tvRefcountedDecRef(m_top);

  // popAR() should only be used to tear down a pre-live ActRec. Once
  // an ActRec is live, it should be torn down using frame_free_locals()
  // followed by discardAR() or ret().
    assert(m_top != m_base);
    ActRec* ar = (ActRec*)m_top;
    if (ar->hasThis()) decRefObj(ar->getThis());
    if (ar->hasInvName()) decRefStr(ar->getInvName());

    // This should only be used on a pre-live ActRec.
    assert(!ar->hasVarEnv());
    assert(!ar->hasExtraArgs());

  // (discardAR fragment: drop an ActRec's worth of cells, debug-trashing
  // them, without any decref work.)
    assert(m_top != m_base);
    for (int i = 0; i < kNumActRecCells; ++i) {
      tvDebugTrash(m_top + i);
    m_top += kNumActRecCells;
    assert((uintptr_t)m_top <= (uintptr_t)m_base);

  // (ret fragment)
    // Leave part of the activation on the stack, since the return value now
    // occupies one of its cells.
    for (int i = 0; i < kNumActRecCells - 1; ++i) {
      tvDebugTrash(m_top + i);
    m_top += kNumActRecCells - 1;
    assert((uintptr_t)m_top <= (uintptr_t)m_base);

  // (discard fragment)
    assert(m_top != m_base);

  // Drop n cells without decref'ing them (debug builds trash the memory).
  void ndiscard(size_t n) {
    assert((uintptr_t)&m_top[n] <= (uintptr_t)m_base);
    for (int i = 0; i < n; ++i) {
      tvDebugTrash(m_top + i);

  // (dup fragment: duplicate the top Cell.)
    assert(m_top != m_base);
    assert(m_top != m_elms);
    assert(m_top->m_type != KindOfRef);

  // (box fragment: the top cell must not already be a Ref.)
    assert(m_top != m_base);
    assert(m_top->m_type != KindOfRef);

    assert(m_top != m_base);

  // (pushUninit fragment: m_top != m_elms guards stack overflow on push.)
    assert(m_top != m_elms);
    tvWriteUninit(m_top);

    assert(m_top != m_elms);

  void pushNullUninit() {
    assert(m_top != m_elms);
    m_top->m_data.num = 0;
    m_top->m_type = KindOfUninit;

// Generate push helpers for constant values (pushTrue, pushFalse, ...).
#define PUSH_METHOD(name, type, field, value) \
  ALWAYS_INLINE void push##name() { \
    assert(m_top != m_elms); \
    m_top->m_data.field = value; \
    m_top->m_type = type; \
  PUSH_METHOD(True, KindOfBoolean, num, 1)
  PUSH_METHOD(False, KindOfBoolean, num, 0)

// Generate push helpers taking the value as an argument.
#define PUSH_METHOD_ARG(name, type, field, argtype, arg) \
  ALWAYS_INLINE void push##name(argtype arg) { \
    assert(m_top != m_elms); \
    m_top->m_data.field = arg; \
    m_top->m_type = type; \
  PUSH_METHOD_ARG(Bool, KindOfBoolean, num, bool, b)
  PUSH_METHOD_ARG(Int, KindOfInt64, num, int64_t, i)
  PUSH_METHOD_ARG(Double, KindOfDouble, dbl, double, d)

  // This should only be called directly when the caller has
  // already adjusted the refcount appropriately
  void pushStringNoRc(StringData* s) {
    assert(m_top != m_elms);
    m_top->m_data.pstr = s;
    m_top->m_type = KindOfString;

  void pushStaticString(StringData* s) {
    assert(s->isStatic()); // No need to call s->incRefCount().
    assert(m_top != m_elms);
    m_top->m_data.pstr = s;
    m_top->m_type = KindOfStaticString;

  // This should only be called directly when the caller has
  // already adjusted the refcount appropriately
  void pushArrayNoRc(ArrayData* a) {
    assert(m_top != m_elms);
    m_top->m_data.parr = a;
    m_top->m_type = KindOfArray;

  void pushArray(ArrayData* a) {

  void pushStaticArray(ArrayData* a) {
    assert(a->isStatic()); // No need to call a->incRefCount().

  // This should only be called directly when the caller has
  // already adjusted the refcount appropriately
  void pushObjectNoRc(ObjectData* o) {
    assert(m_top != m_elms);
    m_top->m_data.pobj = o;
    m_top->m_type = KindOfObject;

  void pushObject(ObjectData* o) {

  // Reserve n cells (inverse direction of ndiscard).
  void nalloc(size_t n) {
    assert((uintptr_t)&m_top[-n] <= (uintptr_t)m_base);

  // (alloc fragments)
    assert(m_top != m_elms);

    assert(m_top != m_elms);

  TypedValue* allocTV() {
    assert(m_top != m_elms);

  // (allocAR fragment: carve out space for an ActRec on the stack.)
    assert((uintptr_t)&m_top[-kNumActRecCells] >= (uintptr_t)m_elms);
    assert(kNumActRecCells * sizeof(Cell) == sizeof(ActRec));
    m_top -= kNumActRecCells;
    return (ActRec*)m_top;

  // (allocI fragment: carve out space for an Iter.)
    assert(kNumIterCells * sizeof(Cell) == sizeof(Iter));
    assert((uintptr_t)&m_top[-kNumIterCells] >= (uintptr_t)m_elms);
    m_top -= kNumIterCells;

  // Replace the top Cell, decref'ing the old value first.
  void replaceC(const Cell& c) {
    assert(m_top != m_base);
    assert(m_top->m_type != KindOfRef);
    tvRefcountedDecRefCell(m_top);

  template <DataType DT>
  // (replaceC<DT>() fragment: replace top with a default value of type DT.)
    assert(m_top != m_base);
    assert(m_top->m_type != KindOfRef);
    tvRefcountedDecRefCell(m_top);
    *m_top = make_tv<DT>();

  template <DataType DT, typename T>
  void replaceC(T value) {
    assert(m_top != m_base);
    assert(m_top->m_type != KindOfRef);
    tvRefcountedDecRefCell(m_top);
    *m_top = make_tv<DT>(value);

  // As replaceC, but the top may be any TypedValue (including a Ref).
  void replaceTV(const TypedValue& tv) {
    assert(m_top != m_base);
    tvRefcountedDecRef(m_top);

  template <DataType DT>
  // (replaceTV<DT>() fragment)
    assert(m_top != m_base);
    tvRefcountedDecRef(m_top);
    *m_top = make_tv<DT>();

  template <DataType DT, typename T>
  void replaceTV(T value) {
    assert(m_top != m_base);
    tvRefcountedDecRef(m_top);
    *m_top = make_tv<DT>(value);

  // (topC fragment: peek at the top Cell.)
    assert(m_top != m_base);
    assert(m_top->m_type != KindOfRef);

  // (topV fragment: peek at the top Ref.)
    assert(m_top != m_base);
    assert(m_top->m_type == KindOfRef);

  const Class* topA() {
    assert(m_top != m_base);
    assert(m_top->m_type == KindOfClass);
    return m_top->m_data.pcls;

  TypedValue* topTV() {
    assert(m_top != m_base);

  // Peek at the Cell `ind` slots below the top.
  Cell* indC(size_t ind) {
    assert(m_top != m_base);
    assert(m_top[ind].m_type != KindOfRef);
    return (Cell*)(&m_top[ind]);

  TypedValue* indTV(size_t ind) {
    assert(m_top != m_base);

  void pushClass(Class* clss) {
    assert(m_top != m_elms);
    m_top->m_data.pcls = clss;
    m_top->m_type = KindOfClass;
//////////////////////////////////////////////////////////////////////

/*
 * Visit all the slots and pre-live ActRecs on a live eval stack,
 * handling FPI regions and resumables correctly, and stopping when we
 * reach the supplied activation record.
 *
 * The stack elements are visited from lower address to higher, with
 * ActRecs visited after the stack slots below them.
 *
 * This will not read the VM registers (pc, fp, sp), so it will
 * perform the requested visitation independent of modifications to
 * the VM stack or frame pointer.
 */
template<class MaybeConstTVPtr, class ARFun, class TVFun>
typename std::enable_if<
  std::is_same<MaybeConstTVPtr,const TypedValue*>::value ||
  std::is_same<MaybeConstTVPtr,      TypedValue*>::value
visitStackElems(const ActRec* const fp,
                MaybeConstTVPtr const stackTop,
                Offset const bcOffset,
  // NOTE(review): the ARFun/TVFun parameter declarations and the `>::type`
  // closer are lost from this view of the file.

  // Resumables keep their stack in a side structure, so the logical frame
  // base differs from the on-stack one.
  const TypedValue* const base =
    fp->resumed() ? Stack::resumableStackBase(fp)
                  : Stack::frameStackBase(fp);
  MaybeConstTVPtr cursor = stackTop;
  assert(cursor <= base);

  // Walk each enclosing FPI region, visiting the slots below each
  // pre-live ActRec before the ActRec itself.
  if (auto fe = fp->m_func->findFPI(bcOffset)) {
    if (!fp->resumed()) {
      ar = arAtOffset(fp, -fe->m_fpOff);
      // fp is pointing into the Resumable struct. Since fpOff is
      // given as an offset from the frame pointer as if it were in
      // the normal place on the main stack, we have to reconstruct
      // that "normal place".
      auto const fakePrevFP = reinterpret_cast<const ActRec*>(
        base + fp->m_func->numSlotsInFrame()
      ar = arAtOffset(fakePrevFP, -fe->m_fpOff);

    assert(cursor <= reinterpret_cast<TypedValue*>(ar));
    while (cursor < reinterpret_cast<TypedValue*>(ar
    // Skip over the ActRec's cells, then continue with the parent region.
    cursor += kNumActRecCells;
    if (fe->m_parentIndex == -1) break;
    fe = &fp->m_func->fpitab()[fe->m_parentIndex];

  // Visit the remaining plain slots up to the frame base.
  while (cursor < base) {