Simplify region selector/pgo options, fix pgo for legacy/tracelet selectors
[hiphop-php.git] / hphp / runtime / vm / bytecode.h
blob b79cff817fc3ade91a3c232be6b5cb4b552e1a44
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #ifndef incl_HPHP_VM_BYTECODE_H_
18 #define incl_HPHP_VM_BYTECODE_H_
20 #include <type_traits>
21 #include <boost/optional.hpp>
23 #include "hphp/util/util.h"
24 #include "hphp/runtime/base/complex-types.h"
25 #include "hphp/runtime/base/tv-arith.h"
26 #include "hphp/runtime/base/tv-conversions.h"
27 #include "hphp/runtime/base/class-info.h"
28 #include "hphp/runtime/base/array-iterator.h"
29 #include "hphp/runtime/base/rds.h"
30 #include "hphp/runtime/base/rds-util.h"
31 #include "hphp/runtime/vm/class.h"
32 #include "hphp/runtime/vm/unit.h"
33 #include "hphp/runtime/vm/func.h"
34 #include "hphp/runtime/vm/name-value-table.h"
35 #include "hphp/runtime/vm/request-arena.h"
37 namespace HPHP {
39 /**
40 * These macros allow us to easily change the arguments to iop*() opcode
41 * implementations.
43 #define IOP_ARGS PC& pc
44 #define IOP_PASS_ARGS pc
45 #define IOP_PASS(pc) pc
47 ALWAYS_INLINE
48 void SETOP_BODY_CELL(Cell* lhs, SetOpOp op, Cell* rhs) {
49 assert(cellIsPlausible(*lhs));
50 assert(cellIsPlausible(*rhs));
52 switch (op) {
53 case SetOpOp::PlusEqual: cellAddEq(*lhs, *rhs); return;
54 case SetOpOp::MinusEqual: cellSubEq(*lhs, *rhs); return;
55 case SetOpOp::MulEqual: cellMulEq(*lhs, *rhs); return;
56 case SetOpOp::DivEqual: cellDivEq(*lhs, *rhs); return;
57 case SetOpOp::ModEqual: cellModEq(*lhs, *rhs); return;
58 case SetOpOp::ConcatEqual:
59 concat_assign(tvAsVariant(lhs), cellAsCVarRef(*rhs).toString());
60 return;
61 case SetOpOp::AndEqual: cellBitAndEq(*lhs, *rhs); return;
62 case SetOpOp::OrEqual: cellBitOrEq(*lhs, *rhs); return;
63 case SetOpOp::XorEqual: cellBitXorEq(*lhs, *rhs); return;
65 case SetOpOp::SlEqual:
66 cellCastToInt64InPlace(lhs);
67 lhs->m_data.num <<= cellToInt(*rhs);
68 return;
69 case SetOpOp::SrEqual:
70 cellCastToInt64InPlace(lhs);
71 lhs->m_data.num >>= cellToInt(*rhs);
72 return;
74 not_reached();
77 ALWAYS_INLINE
78 void SETOP_BODY(TypedValue* lhs, SetOpOp op, Cell* rhs) {
79 SETOP_BODY_CELL(tvToCell(lhs), op, rhs);
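// A minimal usage sketch (hypothetical, not part of this header): a PHP-level
// `$x += 2` where both operands are already int cells boils down to something
// like
//
//   Cell lhs = make_tv<KindOfInt64>(40);
//   Cell rhs = make_tv<KindOfInt64>(2);
//   SETOP_BODY_CELL(&lhs, SetOpOp::PlusEqual, &rhs);
//   assert(lhs.m_data.num == 42);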
82 class Func;
83 struct ActRec;
85 // max number of arguments for direct call to builtin
86 const int kMaxBuiltinArgs = 5;
88 struct ExtraArgs : private boost::noncopyable {
90 * Allocate an ExtraArgs structure, with arguments copied from the
91 * evaluation stack. This takes ownership of the args without
92 * adjusting reference counts, so they must be discarded from the
93 * stack.
95 static ExtraArgs* allocateCopy(TypedValue* args, unsigned nargs);
98 * Allocate an ExtraArgs, without initializing any of the arguments.
99 * All arguments must be initialized via getExtraArg before
100 * deallocate() is called for the returned pointer.
102 static ExtraArgs* allocateUninit(unsigned nargs);
 105    * Deallocate an ExtraArgs structure. Either use the one that
 106    * exists in an ActRec, or do it explicitly.
108 static void deallocate(ActRec*);
109 static void deallocate(ExtraArgs*, unsigned numArgs);
112 * Get the slot for extra arg i, where i = argNum - func->numParams.
114 TypedValue* getExtraArg(unsigned argInd) const;
116 private:
117 ExtraArgs();
118 ~ExtraArgs();
120 static void* allocMem(unsigned nargs);
122 private:
123 TypedValue m_extraArgs[];
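// A sketch of the indexing convention documented above (values hypothetical):
// for a function declared with 2 parameters and called with 5 arguments, the
// three extra arguments live at indices 0..2, so they are reached via
//
//   TypedValue* tv = extraArgs->getExtraArg(argNum - func->numParams());
//
// for argNum in [2, 5).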
127 * Variable environment.
129 * A variable environment consists of the locals for the current function
130 * (either pseudo-main, global function, or method), plus any variables that
131 * are dynamically defined.
133 * Logically, a global function or method starts off with a variable
134 * environment that contains only its locals, but a pseudo-main is handed
 135  * its caller's existing variable environment. Generally, however, we don't
 136  * create a variable environment for a global function or method until one
 137  * is actually needed (i.e. when it is about to include a pseudo-main, or
 138  * when it uses dynamic variable lookups).
140 * Named locals always appear in the expected place on the stack, even after
141 * a VarEnv is attached. Internally uses a NameValueTable to hook up names to
142 * the local locations.
144 class VarEnv {
145 private:
146 ExtraArgs* m_extraArgs;
147 uint16_t m_depth;
148 bool m_malloced;
149 bool m_global;
150 ActRec* m_cfp;
 151    // TODO remove vector (#1099580). Note: changing this to a
 152    // TinyVector<> increased icache misses for now, but it may become
 153    // feasible later (see D511561).
154 std::vector<TypedValue**> m_restoreLocations;
155 boost::optional<NameValueTable> m_nvTable;
157 private:
158 explicit VarEnv();
159 explicit VarEnv(ActRec* fp, ExtraArgs* eArgs);
160 VarEnv(const VarEnv&);
161 VarEnv& operator=(const VarEnv&);
162 ~VarEnv();
164 void ensureNvt();
166 public:
168 * Creates a VarEnv and attaches it to the existing frame fp.
170 * A lazy attach works by bringing all currently existing values for
171 * the names in fp->m_func into the variable environment. This is
172 * used when we need a variable environment for some caller frame
173 * (because we're about to attach a callee frame using attach()) but
174 * don't actually have one.
176 static VarEnv* createLocalOnStack(ActRec* fp);
177 static VarEnv* createLocalOnHeap(ActRec* fp);
179 // Allocate a global VarEnv. Initially not attached to any frame.
180 static VarEnv* createGlobal();
182 static void destroy(VarEnv*);
184 static size_t getObjectSz(ActRec* fp);
186 void attach(ActRec* fp);
187 void detach(ActRec* fp);
189 void set(const StringData* name, TypedValue* tv);
190 void bind(const StringData* name, TypedValue* tv);
191 void setWithRef(const StringData* name, TypedValue* tv);
192 TypedValue* lookup(const StringData* name);
193 TypedValue* lookupAdd(const StringData* name);
194 bool unset(const StringData* name);
196 Array getDefinedVariables() const;
 198    // Used to save/restore m_cfp for the debugger
199 void setCfp(ActRec* fp) { m_cfp = fp; }
200 ActRec* getCfp() const { return m_cfp; }
201 bool isGlobalScope() const { return m_global; }
203 // Access to wrapped ExtraArgs, if we have one.
204 TypedValue* getExtraArg(unsigned argInd) const;
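// A minimal sketch of the dynamic-variable interface (hypothetical names; a
// real VarEnv is created via createLocalOnStack()/createGlobal() and then
// attach()ed to a frame, and `name` stands in for an interned StringData*):
//
//   TypedValue tv = make_tv<KindOfInt64>(7);
//   env->set(name, &tv);                       // roughly `$x = 7` via the VarEnv
//   TypedValue* found = env->lookup(name);
//   assert(found != nullptr && found->m_data.num == 7);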
208 * An "ActRec" is a call activation record. The ordering of the fields assumes
209 * that stacks grow toward lower addresses.
211 * For most purposes, an ActRec can be considered to be in one of three
212 * possible states:
213 * Pre-live:
214 * After the FPush* instruction which materialized the ActRec on the stack
215 * but before the corresponding FCall instruction
216 * Live:
217 * After the corresponding FCall instruction but before the ActRec fields
218 * and locals/iters have been decref'd (either by return or unwinding)
219 * Post-live:
220 * After the ActRec fields and locals/iters have been decref'd
222 * Note that when a function is invoked by the runtime via invokeFunc(), the
223 * "pre-live" state is skipped and the ActRec is materialized in the "live"
224 * state.
226 struct ActRec {
227 union {
228 // This pair of uint64_t's must be the first two elements in the structure
229 // so that the pointer to the ActRec can also be used for RBP chaining.
230 // Note that ActRec's are also x64 frames, so this is an implicit machine
231 // dependency.
232 TypedValue _dummyA;
233 struct {
234 uint64_t m_savedRbp; // Previous hardware frame pointer/ActRec.
235 uint64_t m_savedRip; // In-TC address to return to.
238 union {
239 TypedValue _dummyB;
240 struct {
241 const Func* m_func; // Function.
242 uint32_t m_soff; // Saved offset of caller from beginning of
243 // caller's Func's bytecode.
245 // Bits 0-30 are the number of function args; the high bit is
246 // whether this ActRec came from FPushCtor*.
247 uint32_t m_numArgsAndCtorFlag;
250 union {
251 TypedValue m_r; // Return value teleported here when the ActRec
252 // is post-live.
253 struct {
254 union {
255 ObjectData* m_this; // This.
256 Class* m_cls; // Late bound class.
258 union {
259 VarEnv* m_varEnv; // Variable environment; only used when the
260 // ActRec is live.
261 ExtraArgs* m_extraArgs; // Light-weight extra args; used only when the
262 // ActRec is live.
263 StringData* m_invName; // Invoked function name (used for __call);
264 // only used when ActRec is pre-live.
269 // Get the next outermost VM frame, but if this is
270 // a re-entry frame, return ar
271 ActRec* arGetSfp() const;
273 // skip this frame if it is for a builtin function
274 bool skipFrame() const;
277 * Accessors for the packed m_numArgsAndCtorFlag field. We track
278 * whether ActRecs came from FPushCtor* so that during unwinding we
279 * can set the flag not to call destructors for objects whose
280 * constructors exit via an exception.
283 int32_t numArgs() const {
284 return decodeNumArgs(m_numArgsAndCtorFlag).first;
287 bool isFromFPushCtor() const {
288 return decodeNumArgs(m_numArgsAndCtorFlag).second;
291 static inline uint32_t
292 encodeNumArgs(uint32_t numArgs, bool isFPushCtor = false) {
293 assert((numArgs & (1u << 31)) == 0);
294 return numArgs | (isFPushCtor << 31);
297 static inline std::pair<uint32_t,bool>
298 decodeNumArgs(uint32_t numArgs) {
299 return { numArgs & ~(1u << 31), numArgs & (1u << 31) };
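// A round-trip sketch of the packed encoding (values hypothetical):
//
//   uint32_t packed = ActRec::encodeNumArgs(3, true /* isFPushCtor */);
//   auto decoded = ActRec::decodeNumArgs(packed);
//   assert(decoded.first == 3 && decoded.second);   // low 31 bits / high bit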
302 void initNumArgs(uint32_t numArgs, bool isFPushCtor = false) {
303 m_numArgsAndCtorFlag = encodeNumArgs(numArgs, isFPushCtor);
306 void setNumArgs(uint32_t numArgs) {
307 initNumArgs(numArgs, isFromFPushCtor());
310 static void* encodeThis(ObjectData* obj, Class* cls) {
311 if (obj) return obj;
312 if (cls) return (char*)cls + 1;
313 not_reached();
316 static void* encodeThis(ObjectData* obj) { return obj; }
317 static void* encodeClass(const Class* cls) {
318 return cls ? (char*)cls + 1 : nullptr;
320 static ObjectData* decodeThis(void* p) {
321 return (uintptr_t(p) & 1) ? nullptr : (ObjectData*)p;
323 static Class* decodeClass(void* p) {
324 return (uintptr_t(p) & 1) ? (Class*)(uintptr_t(p)&~1LL) : nullptr;
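// A sketch of the low-bit tagging used by the encode/decode helpers above
// (pointers hypothetical): an ObjectData* is stored untagged, a Class* is
// stored with its low bit set, so the bit distinguishes the two at runtime.
//
//   void* enc = ActRec::encodeClass(cls);          // (char*)cls + 1
//   assert(ActRec::decodeThis(enc) == nullptr);    // tagged, so not a $this
//   assert(ActRec::decodeClass(enc) == cls);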
327 void setThisOrClass(void* objOrCls) {
328 setThisOrClassAllowNull(objOrCls);
329 assert(hasThis() || hasClass());
331 void setThisOrClassAllowNull(void* objOrCls) {
332 m_this = (ObjectData*)objOrCls;
335 void* getThisOrClass() const {
336 return m_this;
339 const Unit* unit() const {
340 func()->validate();
341 return func()->unit();
344 const Func* func() const {
345 return m_func;
349 * To conserve space, we use unions for pairs of mutually exclusive
350 * fields (fields that are not used at the same time). We use unions
351 * for m_this/m_cls and m_varEnv/m_invName.
353 * The least significant bit is used as a marker for each pair of fields
354 * so that we can distinguish at runtime which field is valid. We define
355 * accessors below to encapsulate this logic.
357 * Note that m_invName is only used when the ActRec is pre-live. Thus when
358 * an ActRec is live it is safe to directly access m_varEnv without using
359 * accessors.
362 #define UNION_FIELD_ACCESSORS2(name1, type1, field1, name2, type2, field2) \
363 inline bool has##name1() const { \
364 return field1 && !(intptr_t(field1) & 1LL); \
366 inline bool has##name2() const { \
367 return bool(intptr_t(field2) & 1LL); \
369 inline type1 get##name1() const { \
370 assert(has##name1()); \
371 return field1; \
373 inline type2 get##name2() const { \
374 assert(has##name2()); \
375 return (type2)(intptr_t(field2) & ~1LL); \
377 inline void set##name1(type1 val) { \
378 field1 = val; \
380 inline void set##name2(type2 val) { \
381 field2 = (type2)(intptr_t(val) | 1LL); \
384 #define UNION_FIELD_ACCESSORS3(name1, type1, field1, name2, type2, field2, name3, type3, field3) \
385 inline bool has##name1() const { \
386 return field1 && !(intptr_t(field1) & 3LL); \
388 inline bool has##name2() const { \
389 return bool(intptr_t(field2) & 1LL); \
391 inline bool has##name3() const { \
392 return bool(intptr_t(field3) & 2LL); \
394 inline type1 get##name1() const { \
395 assert(has##name1()); \
396 return field1; \
398 inline type2 get##name2() const { \
399 assert(has##name2()); \
400 return (type2)(intptr_t(field2) & ~1LL); \
402 inline type3 get##name3() const { \
403 return (type3)(intptr_t(field3) & ~2LL); \
405 inline void set##name1(type1 val) { \
406 field1 = val; \
408 inline void set##name2(type2 val) { \
409 field2 = (type2)(intptr_t(val) | 1LL); \
411 inline void set##name3(type3 val) { \
412 field3 = (type3)(intptr_t(val) | 2LL); \
415 // Note that reordering these is likely to require changes to the
416 // translator.
417 UNION_FIELD_ACCESSORS2(This, ObjectData*, m_this, \
418 Class, Class*, m_cls)
419 static const int8_t kInvNameBit = 0x1;
420 static const int8_t kExtraArgsBit = 0x2;
421 UNION_FIELD_ACCESSORS3(VarEnv, VarEnv*, m_varEnv, \
422 InvName, StringData*, m_invName, \
423 ExtraArgs, ExtraArgs*, m_extraArgs)
425 #undef UNION_FIELD_ACCESSORS2
426 #undef UNION_FIELD_ACCESSORS3
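// The macros above expand into hasThis()/getThis()/setThis(),
// hasClass()/getClass()/setClass(), hasVarEnv(), hasInvName(), hasExtraArgs()
// and the corresponding getters/setters. A sketch of the intended discipline
// on a pre-live ActRec (hypothetical):
//
//   ar->setInvName(name);                       // stored with the low bit set
//   assert(ar->hasInvName() && !ar->hasVarEnv());
//   assert(ar->getInvName() == name);           // tag stripped on read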
428 // Accessors for extra arg queries.
429 TypedValue* getExtraArg(unsigned ind) const {
430 assert(hasExtraArgs() || hasVarEnv());
431 return hasExtraArgs() ? getExtraArgs()->getExtraArg(ind) :
432 hasVarEnv() ? getVarEnv()->getExtraArg(ind) :
433 static_cast<TypedValue*>(0);
437 inline int32_t arOffset(const ActRec* ar, const ActRec* other) {
438 return (intptr_t(other) - intptr_t(ar)) / sizeof(TypedValue);
441 inline ActRec* arAtOffset(const ActRec* ar, int32_t offset) {
442 return (ActRec*)(intptr_t(ar) + intptr_t(offset * sizeof(TypedValue)));
445 inline ActRec* arFromSpOffset(const ActRec *sp, int32_t offset) {
446 return arAtOffset(sp, offset);
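// arOffset() and arAtOffset() are inverses; both measure distance in
// TypedValue-sized slots. A sketch (pointers hypothetical):
//
//   int32_t off = arOffset(fp, prevAr);
//   assert(arAtOffset(fp, off) == prevAr);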
449 inline void arSetSfp(ActRec* ar, const ActRec* sfp) {
450 static_assert(offsetof(ActRec, m_savedRbp) == 0,
451 "m_savedRbp should be at offset 0 of ActRec");
452 static_assert(sizeof(ActRec*) <= sizeof(uint64_t),
453 "ActRec* must be <= 64 bits");
454 ar->m_savedRbp = (uint64_t)sfp;
457 template <bool crossBuiltin> Class* arGetContextClassImpl(const ActRec* ar);
458 template <> Class* arGetContextClassImpl<true>(const ActRec* ar);
459 template <> Class* arGetContextClassImpl<false>(const ActRec* ar);
460 inline Class* arGetContextClass(const ActRec* ar) {
461 return arGetContextClassImpl<false>(ar);
463 inline Class* arGetContextClassFromBuiltin(const ActRec* ar) {
464 return arGetContextClassImpl<true>(ar);
467 // Used by extension functions that take a PHP "callback", since they need to
468 // figure out the callback context once and call it multiple times. (e.g.
469 // array_map, array_filter, ...)
470 struct CallCtx {
471 const Func* func;
472 ObjectData* this_;
473 Class* cls;
474 StringData* invName;
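// The struct is typically filled once from the PHP "callback" value and then
// reused for each invocation. A hand-filled sketch (hypothetical values; real
// code uses a shared decoding helper rather than assigning fields directly):
//
//   CallCtx ctx;
//   ctx.func    = f;        // resolved callee
//   ctx.this_   = obj;      // bound $this, or nullptr
//   ctx.cls     = nullptr;  // late-bound class when there is no $this
//   ctx.invName = nullptr;  // only set for __call-style invocations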
477 constexpr size_t kNumIterCells = sizeof(Iter) / sizeof(Cell);
478 constexpr size_t kNumActRecCells = sizeof(ActRec) / sizeof(Cell);
481 * We pad all stack overflow checks by a small amount to allow for three
482 * things:
 484  *  - inlining functions without having to either do another stack
 485  *    check or chase down prologues to smash checks to be bigger.
487 * - omitting stack overflow checks on leaf functions
489 * - delaying stack overflow checks on reentry
491 constexpr int kStackCheckLeafPadding = 20;
492 constexpr int kStackCheckReenterPadding = 9;
493 constexpr int kStackCheckPadding = kStackCheckLeafPadding +
494 kStackCheckReenterPadding;
496 constexpr int kInvalidRaiseLevel = -1;
497 constexpr int kInvalidNesting = -1;
499 struct Fault {
500 enum class Type : int16_t {
501 UserException,
502 CppException
505 explicit Fault()
506 : m_raiseNesting(kInvalidNesting),
507 m_raiseFrame(nullptr),
508 m_raiseOffset(kInvalidOffset),
509 m_handledCount(0) {}
511 union {
512 ObjectData* m_userException;
513 Exception* m_cppException;
515 Type m_faultType;
517 // The VM nesting at the moment where the exception was thrown.
518 int m_raiseNesting;
519 // The frame where the exception was thrown.
520 ActRec* m_raiseFrame;
521 // The offset within the frame where the exception was thrown.
 522    // This value is updated when exception chaining takes place.
 523    // In this case the raise offset of the newly
 524    // thrown exception is set to the offset of the previously thrown
 525    // exception. The offset is also updated when the exception
 526    // propagates outside its current frame.
527 Offset m_raiseOffset;
528 // The number of EHs that were already examined for this exception.
529 // This is used to ensure that the same exception handler is not
530 // run twice for the same exception. The unwinder may be entered
531 // multiple times for the same fault as a result of calling Unwind.
532 // The field is used to skip through the EHs that were already run.
533 int m_handledCount;
536 // Interpreter evaluation stack.
537 class Stack {
538 TypedValue* m_elms;
539 TypedValue* m_top;
540 TypedValue* m_base; // Stack grows down, so m_base is beyond the end of
541 // m_elms.
543 public:
544 void* getStackLowAddress() const { return m_elms; }
545 void* getStackHighAddress() const { return m_base; }
546 bool isValidAddress(uintptr_t v) {
547 return v >= uintptr_t(m_elms) && v < uintptr_t(m_base);
549 void requestInit();
550 void requestExit();
552 private:
553 void toStringFrame(std::ostream& os, const ActRec* fp,
554 int offset, const TypedValue* ftop,
555 const std::string& prefix) const;
557 public:
558 static const int sSurprisePageSize;
559 static const uint sMinStackElms;
560 static void ValidateStackSize();
561 Stack();
562 ~Stack();
564 std::string toString(const ActRec* fp, int offset,
565 std::string prefix="") const;
567 bool wouldOverflow(int numCells) const;
570 * top --
571 * topOfStackOffset --
573 * Accessors for the x64 translator. Do not play on or around.
575 TypedValue*& top() {
576 return m_top;
579 static inline size_t topOfStackOffset() {
580 Stack *that = 0;
581 return (size_t)&that->m_top;
584 static TypedValue* frameStackBase(const ActRec* fp);
585 static TypedValue* generatorStackBase(const ActRec* fp);
587 ALWAYS_INLINE
588 size_t count() const {
589 return ((uintptr_t)m_base - (uintptr_t)m_top) / sizeof(TypedValue);
592 // Same as discard(), but meant to replace popC() iff the interpreter knows
593 // for certain that decrementing a refcount is unnecessary.
594 ALWAYS_INLINE
595 void popX() {
596 assert(m_top != m_base);
597 assert(!IS_REFCOUNTED_TYPE(m_top->m_type));
598 tvDebugTrash(m_top);
599 m_top++;
602 ALWAYS_INLINE
603 void popC() {
604 assert(m_top != m_base);
605 assert(cellIsPlausible(*m_top));
606 tvRefcountedDecRefCell(m_top);
607 tvDebugTrash(m_top);
608 m_top++;
611 ALWAYS_INLINE
612 void popA() {
613 assert(m_top != m_base);
614 assert(m_top->m_type == KindOfClass);
615 tvDebugTrash(m_top);
616 m_top++;
619 ALWAYS_INLINE
620 void popV() {
621 assert(m_top != m_base);
622 assert(refIsPlausible(*m_top));
623 tvDecRefRef(m_top);
624 tvDebugTrash(m_top);
625 m_top++;
628 ALWAYS_INLINE
629 void popTV() {
630 assert(m_top != m_base);
631 assert(m_top->m_type == KindOfClass || tvIsPlausible(*m_top));
632 tvRefcountedDecRef(m_top);
633 tvDebugTrash(m_top);
634 m_top++;
637 // popAR() should only be used to tear down a pre-live ActRec. Once
638 // an ActRec is live, it should be torn down using frame_free_locals()
639 // followed by discardAR() or ret().
640 ALWAYS_INLINE
641 void popAR() {
642 assert(m_top != m_base);
643 ActRec* ar = (ActRec*)m_top;
644 if (ar->hasThis()) decRefObj(ar->getThis());
645 if (ar->hasInvName()) decRefStr(ar->getInvName());
647 // This should only be used on a pre-live ActRec.
648 assert(!ar->hasVarEnv());
649 assert(!ar->hasExtraArgs());
650 discardAR();
653 ALWAYS_INLINE
654 void discardAR() {
655 assert(m_top != m_base);
656 if (debug) {
657 for (int i = 0; i < kNumActRecCells; ++i) {
658 tvDebugTrash(m_top + i);
661 m_top += kNumActRecCells;
662 assert((uintptr_t)m_top <= (uintptr_t)m_base);
665 ALWAYS_INLINE
666 void ret() {
667 // Leave part of the activation on the stack, since the return value now
668 // resides there.
669 if (debug) {
670 for (int i = 0; i < kNumActRecCells - 1; ++i) {
671 tvDebugTrash(m_top + i);
674 m_top += kNumActRecCells - 1;
675 assert((uintptr_t)m_top <= (uintptr_t)m_base);
678 ALWAYS_INLINE
679 void discard() {
680 assert(m_top != m_base);
681 tvDebugTrash(m_top);
682 m_top++;
685 ALWAYS_INLINE
686 void ndiscard(size_t n) {
687 assert((uintptr_t)&m_top[n] <= (uintptr_t)m_base);
688 if (debug) {
689 for (int i = 0; i < n; ++i) {
690 tvDebugTrash(m_top + i);
693 m_top += n;
696 ALWAYS_INLINE
697 void dup() {
698 assert(m_top != m_base);
699 assert(m_top != m_elms);
700 assert(m_top->m_type != KindOfRef);
701 Cell* fr = m_top;
702 m_top--;
703 Cell* to = m_top;
704 cellDup(*fr, *to);
707 ALWAYS_INLINE
708 void box() {
709 assert(m_top != m_base);
710 assert(m_top->m_type != KindOfRef);
711 tvBox(m_top);
714 ALWAYS_INLINE
715 void unbox() {
716 assert(m_top != m_base);
717 tvUnbox(m_top);
720 ALWAYS_INLINE
721 void pushUninit() {
722 assert(m_top != m_elms);
723 m_top--;
724 tvWriteUninit(m_top);
727 ALWAYS_INLINE
728 void pushNull() {
729 assert(m_top != m_elms);
730 m_top--;
731 tvWriteNull(m_top);
734 ALWAYS_INLINE
735 void pushNullUninit() {
736 assert(m_top != m_elms);
737 m_top--;
738 m_top->m_data.num = 0;
739 m_top->m_type = KindOfUninit;
742 #define PUSH_METHOD(name, type, field, value) \
743 ALWAYS_INLINE void push##name() { \
744 assert(m_top != m_elms); \
745 m_top--; \
746 m_top->m_data.field = value; \
747 m_top->m_type = type; \
749 PUSH_METHOD(True, KindOfBoolean, num, 1)
750 PUSH_METHOD(False, KindOfBoolean, num, 0)
752 #define PUSH_METHOD_ARG(name, type, field, argtype, arg) \
753 ALWAYS_INLINE void push##name(argtype arg) { \
754 assert(m_top != m_elms); \
755 m_top--; \
756 m_top->m_data.field = arg; \
757 m_top->m_type = type; \
759 PUSH_METHOD_ARG(Bool, KindOfBoolean, num, bool, b)
760 PUSH_METHOD_ARG(Int, KindOfInt64, num, int64_t, i)
761 PUSH_METHOD_ARG(Double, KindOfDouble, dbl, double, d)
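  // PUSH_METHOD_ARG expands to pushBool(bool), pushInt(int64_t) and
  // pushDouble(double). A sketch of how these pair with the pop helpers above
  // (hypothetical stack):
  //
  //   stack.pushInt(42);                        // writes a KindOfInt64 cell
  //   assert(stack.topC()->m_data.num == 42);
  //   stack.popX();                             // ok: ints are never refcounted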
763 // This should only be called directly when the caller has
764 // already adjusted the refcount appropriately
765 ALWAYS_INLINE
766 void pushStringNoRc(StringData* s) {
767 assert(m_top != m_elms);
768 m_top--;
769 m_top->m_data.pstr = s;
770 m_top->m_type = KindOfString;
773 ALWAYS_INLINE
774 void pushStaticString(StringData* s) {
775 assert(s->isStatic()); // No need to call s->incRefCount().
776 assert(m_top != m_elms);
777 m_top--;
778 m_top->m_data.pstr = s;
779 m_top->m_type = KindOfStaticString;
782 // This should only be called directly when the caller has
783 // already adjusted the refcount appropriately
784 ALWAYS_INLINE
785 void pushArrayNoRc(ArrayData* a) {
786 assert(m_top != m_elms);
787 m_top--;
788 m_top->m_data.parr = a;
789 m_top->m_type = KindOfArray;
792 ALWAYS_INLINE
793 void pushArray(ArrayData* a) {
794 assert(a);
795 pushArrayNoRc(a);
796 a->incRefCount();
799 ALWAYS_INLINE
800 void pushStaticArray(ArrayData* a) {
801 assert(a->isStatic()); // No need to call a->incRefCount().
802 pushArrayNoRc(a);
805 // This should only be called directly when the caller has
806 // already adjusted the refcount appropriately
807 ALWAYS_INLINE
808 void pushObjectNoRc(ObjectData* o) {
809 assert(m_top != m_elms);
810 m_top--;
811 m_top->m_data.pobj = o;
812 m_top->m_type = KindOfObject;
815 ALWAYS_INLINE
816 void pushObject(ObjectData* o) {
817 pushObjectNoRc(o);
818 o->incRefCount();
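  // pushObject() takes a new reference on behalf of the stack, while the NoRc
  // variant assumes the caller is transferring a reference it already owns.
  // A sketch (hypothetical object):
  //
  //   stack.pushObject(obj);   // refcount +1
  //   stack.popC();            // refcount -1 (may free if it was the last ref)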
821 ALWAYS_INLINE
822 void nalloc(size_t n) {
823 assert((uintptr_t)&m_top[-n] <= (uintptr_t)m_base);
824 m_top -= n;
827 ALWAYS_INLINE
828 Cell* allocC() {
829 assert(m_top != m_elms);
830 m_top--;
831 return (Cell*)m_top;
834 ALWAYS_INLINE
835 Ref* allocV() {
836 assert(m_top != m_elms);
837 m_top--;
838 return (Ref*)m_top;
841 ALWAYS_INLINE
842 TypedValue* allocTV() {
843 assert(m_top != m_elms);
844 m_top--;
845 return m_top;
848 ALWAYS_INLINE
849 ActRec* allocA() {
850 assert((uintptr_t)&m_top[-kNumActRecCells] >= (uintptr_t)m_elms);
851 assert(kNumActRecCells * sizeof(Cell) == sizeof(ActRec));
852 m_top -= kNumActRecCells;
853 return (ActRec*)m_top;
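  // A sketch of how a pre-live ActRec is carved out of the stack (roughly what
  // the FPush* instructions do; the field setup shown is illustrative, not the
  // exact interpreter sequence):
  //
  //   ActRec* ar = stack.allocA();              // reserves kNumActRecCells slots
  //   ar->m_func = callee;
  //   ar->initNumArgs(numArgs);
  //   ar->setThisOrClassAllowNull(nullptr);     // no $this / late-bound class
  //   ar->setVarEnv(nullptr);                   // no VarEnv/invName/ExtraArgs yet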
856 ALWAYS_INLINE
857 void allocI() {
858 assert(kNumIterCells * sizeof(Cell) == sizeof(Iter));
859 assert((uintptr_t)&m_top[-kNumIterCells] >= (uintptr_t)m_elms);
860 m_top -= kNumIterCells;
863 ALWAYS_INLINE
864 void replaceC(const Cell& c) {
865 assert(m_top != m_base);
866 assert(m_top->m_type != KindOfRef);
867 tvRefcountedDecRefCell(m_top);
868 *m_top = c;
871 template <DataType DT>
872 ALWAYS_INLINE
873 void replaceC() {
874 assert(m_top != m_base);
875 assert(m_top->m_type != KindOfRef);
876 tvRefcountedDecRefCell(m_top);
877 *m_top = make_tv<DT>();
880 template <DataType DT, typename T>
881 ALWAYS_INLINE
882 void replaceC(T value) {
883 assert(m_top != m_base);
884 assert(m_top->m_type != KindOfRef);
885 tvRefcountedDecRefCell(m_top);
886 *m_top = make_tv<DT>(value);
889 ALWAYS_INLINE
890 void replaceTV(const TypedValue& tv) {
891 assert(m_top != m_base);
892 tvRefcountedDecRef(m_top);
893 *m_top = tv;
896 template <DataType DT>
897 ALWAYS_INLINE
898 void replaceTV() {
899 assert(m_top != m_base);
900 tvRefcountedDecRef(m_top);
901 *m_top = make_tv<DT>();
904 template <DataType DT, typename T>
905 ALWAYS_INLINE
906 void replaceTV(T value) {
907 assert(m_top != m_base);
908 tvRefcountedDecRef(m_top);
909 *m_top = make_tv<DT>(value);
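  // A sketch of the typed replace helpers (hypothetical): overwrite the top of
  // the stack in place, releasing whatever was there before.
  //
  //   stack.replaceTV<KindOfBoolean>(true);     // decref old top, write a bool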
912 ALWAYS_INLINE
913 Cell* topC() {
914 assert(m_top != m_base);
915 assert(m_top->m_type != KindOfRef);
916 return (Cell*)m_top;
919 ALWAYS_INLINE
920 Ref* topV() {
921 assert(m_top != m_base);
922 assert(m_top->m_type == KindOfRef);
923 return (Ref*)m_top;
926 ALWAYS_INLINE
927 TypedValue* topTV() {
928 assert(m_top != m_base);
929 return m_top;
932 ALWAYS_INLINE
933 Cell* indC(size_t ind) {
934 assert(m_top != m_base);
935 assert(m_top[ind].m_type != KindOfRef);
936 return (Cell*)(&m_top[ind]);
939 ALWAYS_INLINE
940 TypedValue* indTV(size_t ind) {
941 assert(m_top != m_base);
942 return &m_top[ind];
945 ALWAYS_INLINE
946 void pushClass(Class* clss) {
947 assert(m_top != m_elms);
948 m_top--;
949 m_top->m_data.pcls = clss;
950 m_top->m_type = KindOfClass;
954 //////////////////////////////////////////////////////////////////////
957 * Visit all the slots and pre-live ActRecs on a live eval stack,
958 * handling FPI regions and generators correctly, and stopping when we
959 * reach the supplied activation record.
961 * The stack elements are visited from lower address to higher, with
962 * ActRecs visited after the stack slots below them.
964 * This will not read the VM registers (pc, fp, sp), so it will
965 * perform the requested visitation independent of modifications to
966 * the VM stack or frame pointer.
968 template<class MaybeConstTVPtr, class ARFun, class TVFun>
969 typename std::enable_if<
970 std::is_same<MaybeConstTVPtr,const TypedValue*>::value ||
971 std::is_same<MaybeConstTVPtr, TypedValue*>::value
972 >::type
973 visitStackElems(const ActRec* const fp,
974 MaybeConstTVPtr const stackTop,
975 Offset const bcOffset,
976 ARFun arFun,
977 TVFun tvFun) {
978 const TypedValue* const base =
979 fp->m_func->isGenerator() ? Stack::generatorStackBase(fp)
980 : Stack::frameStackBase(fp);
981 MaybeConstTVPtr cursor = stackTop;
982 assert(cursor <= base);
984 if (auto fe = fp->m_func->findFPI(bcOffset)) {
985 for (;;) {
986 ActRec* ar;
987 if (!fp->m_func->isGenerator()) {
988 ar = arAtOffset(fp, -fe->m_fpOff);
989 } else {
990 // fp is pointing into the continuation object. Since fpOff is
991 // given as an offset from the frame pointer as if it were in
992 // the normal place on the main stack, we have to reconstruct
993 // that "normal place".
994 auto const fakePrevFP = reinterpret_cast<const ActRec*>(
995 base + fp->m_func->numSlotsInFrame()
997 ar = arAtOffset(fakePrevFP, -fe->m_fpOff);
1000 assert(cursor <= reinterpret_cast<TypedValue*>(ar));
1001 while (cursor < reinterpret_cast<TypedValue*>(ar)) {
1002 tvFun(cursor++);
1004 arFun(ar);
1006 cursor += kNumActRecCells;
1007 if (fe->m_parentIndex == -1) break;
1008 fe = &fp->m_func->fpitab()[fe->m_parentIndex];
1012 while (cursor < base) {
1013 tvFun(cursor++);
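// A sketch of walking a frame's eval stack with visitStackElems (fp, sp and
// bcOff are hypothetical stand-ins for the frame pointer, stack top and
// current bytecode offset):
//
//   visitStackElems(fp, sp, bcOff,
//     [] (const ActRec* ar)     { /* visit a pre-live ActRec */ },
//     [] (const TypedValue* tv) { /* visit an ordinary stack slot */ });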
1017 ///////////////////////////////////////////////////////////////////////////////
1021 #endif