2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/vm/bytecode.h"
23 #include "folly/String.h"
25 #include "hphp/runtime/base/tv_comparisons.h"
26 #include "hphp/runtime/base/tv_conversions.h"
27 #include "hphp/runtime/base/tv_arith.h"
28 #include "hphp/compiler/builtin_symbols.h"
29 #include "hphp/runtime/vm/event_hook.h"
30 #include "hphp/runtime/vm/jit/translator.h"
31 #include "hphp/runtime/vm/srckey.h"
32 #include "hphp/runtime/vm/member_operations.h"
33 #include "hphp/runtime/base/class_info.h"
34 #include "hphp/runtime/base/code_coverage.h"
35 #include "hphp/runtime/base/file_repository.h"
36 #include "hphp/runtime/base/base_includes.h"
37 #include "hphp/runtime/base/execution_context.h"
38 #include "hphp/runtime/base/runtime_option.h"
39 #include "hphp/runtime/base/array/hphp_array.h"
40 #include "hphp/runtime/base/strings.h"
41 #include "hphp/util/util.h"
42 #include "hphp/util/trace.h"
43 #include "hphp/util/debug.h"
44 #include "hphp/runtime/base/stat_cache.h"
45 #include "hphp/runtime/base/shared/shared_variant.h"
46 #include "hphp/runtime/vm/debug/debug.h"
48 #include "hphp/runtime/vm/hhbc.h"
49 #include "hphp/runtime/vm/treadmill.h"
50 #include "hphp/runtime/vm/php_debug.h"
51 #include "hphp/runtime/vm/debugger_hook.h"
52 #include "hphp/runtime/vm/runtime.h"
53 #include "hphp/runtime/vm/jit/targetcache.h"
54 #include "hphp/runtime/vm/type_constraint.h"
55 #include "hphp/runtime/vm/unwind.h"
56 #include "hphp/runtime/vm/jit/translator-inline.h"
57 #include "hphp/runtime/ext/ext_string.h"
58 #include "hphp/runtime/ext/ext_error.h"
59 #include "hphp/runtime/ext/ext_closure.h"
60 #include "hphp/runtime/ext/ext_continuation.h"
61 #include "hphp/runtime/ext/ext_function.h"
62 #include "hphp/runtime/ext/ext_variable.h"
63 #include "hphp/runtime/ext/ext_array.h"
64 #include "hphp/runtime/base/stats.h"
65 #include "hphp/runtime/vm/type_profile.h"
66 #include "hphp/runtime/base/server/source_root_info.h"
67 #include "hphp/runtime/base/util/extended_logger.h"
68 #include "hphp/runtime/base/memory/tracer.h"
70 #include "hphp/system/systemlib.h"
71 #include "hphp/runtime/ext/ext_collections.h"
73 #include "hphp/runtime/vm/name_value_table_wrapper.h"
74 #include "hphp/runtime/vm/request_arena.h"
75 #include "hphp/util/arena.h"
80 #include <boost/format.hpp>
81 #include <boost/utility/typed_in_place_factory.hpp>
90 // TODO: #1746957, #1756122
91 // we should skip the call in call_user_func_array, if
92 // by reference params are passed by value, or if its
93 // argument is not an array, but currently lots of tests
94 // depend on actually making the call.
95 const bool skipCufOnInvalidParams
= false;
97 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
98 // to be closer to other bytecode.cpp data.
99 bool RuntimeOption::RepoAuthoritative
= false;
103 using Transl::VMRegAnchor
;
104 using Transl::EagerVMRegAnchor
;
107 #define OPTBLD_INLINE
109 #define OPTBLD_INLINE ALWAYS_INLINE
111 TRACE_SET_MOD(bcinterp
);
// Returns the caller's ActRec ("saved frame pointer") for this frame.
// NOTE(review): original source lines 118-119 are missing from this
// extraction (presumably closing braces); text below is reproduced verbatim.
113 ActRec
* ActRec::arGetSfp() const {
114 ActRec
// m_savedRbp holds the previous frame pointer; reinterpret as an ActRec*.
* prevFrame
= (ActRec
*)m_savedRbp
;
// Accept prevFrame only when it lies OUTSIDE the native C++ stack range
// [Util::s_stackLimit, Util::s_stackLimit + Util::s_stackSize) -- an address
// inside that range is a native frame, not a VM ActRec (unsigned-subtract
// range-check idiom).
115 if (LIKELY(((uintptr_t)prevFrame
- Util::s_stackLimit
) >=
116 Util::s_stackSize
)) {
117 if (LIKELY(prevFrame
!= nullptr)) return prevFrame
;
// No valid previous VM frame: fall back to this frame itself.
120 return const_cast<ActRec
*>(this);
124 ActRec::skipFrame() const {
125 return m_func
&& m_func
->skipFrame();
// Specialization (skipBuiltin = false): the context class is simply the
// class of the frame's function, with no skipping of builtins/pseudomains.
// NOTE(review): original lines 130-132 are missing from this extraction --
// presumably the template<> header and a null-check on `ar`; verify against
// the full file.
129 Class
* arGetContextClassImpl
<false>(const ActRec
* ar
) {
133 return ar
->m_func
->cls();
// Specialization (skipBuiltin = true): pseudomains and builtins do not
// establish their own class context, so walk back through previous VM
// frames until a normal PHP frame is found, then report its class.
// NOTE(review): original lines 148-152 are missing from this extraction
// (presumably closing braces and a null-`ar` early return); text below is
// reproduced verbatim.
137 Class
* arGetContextClassImpl
<true>(const ActRec
* ar
) {
141 if (ar
->m_func
->isPseudoMain() || ar
->m_func
->isBuiltin()) {
142 // Pseudomains inherit the context of their caller
143 VMExecutionContext
* context
= g_vmContext
;
// Step to the caller's frame, then keep stepping while the frame is still
// a pseudomain or a builtin.
144 ar
= context
->getPrevVMState(ar
);
145 while (ar
!= nullptr &&
146 (ar
->m_func
->isPseudoMain() || ar
->m_func
->isBuiltin())) {
147 ar
= context
->getPrevVMState(ar
);
// Context class of the first "real" frame found.
153 return ar
->m_func
->cls();
156 const StaticString
s_call_user_func("call_user_func");
157 const StaticString
s_call_user_func_array("call_user_func_array");
158 const StaticString
s_stdclass("stdclass");
159 const StaticString
s___call("__call");
160 const StaticString
s___callStatic("__callStatic");
161 const StaticString
s_file("file");
162 const StaticString
s_line("line");
163 const StaticString
s_function("function");
164 const StaticString
s_args("args");
165 const StaticString
s_class("class");
166 const StaticString
s_object("object");
167 const StaticString
s_type("type");
168 const StaticString
s_include("include");
// Convenience accessor for the process-wide JIT translator singleton.
171 Transl::Translator
* tx() {
172 return Transl::Translator::Get();
175 ///////////////////////////////////////////////////////////////////////////////
177 //=============================================================================
178 // Miscellaneous macros.
181 #define DECODE_JMP(type, var) \
182 type var __attribute__((unused)) = *(type*)pc; \
184 Trace::trace("decode: Immediate %s %" PRIi64"\n", #type, \
186 #define ITER_SKIP(offset) pc = origPc + (offset);
188 #define DECODE(type, var) \
189 DECODE_JMP(type, var); \
191 #define DECODE_IVA(var) \
192 int32_t var UNUSED = decodeVariableSizeImm(&pc); \
194 Trace::trace("decode: Immediate int32 %" PRIi64"\n", \
196 #define DECODE_LITSTR(var) \
200 var = m_fp->m_func->unit()->lookupLitstrId(id); \
203 #define DECODE_HA(var) DECODE_IVA(var)
204 #define DECODE_IA(var) DECODE_IVA(var)
206 #define DECODE_ITER_LIST(typeList, idList, vecLen) \
207 DECODE(int32_t, vecLen); \
208 assert(vecLen > 0); \
209 Id* typeList = (Id*)pc; \
210 Id* idList = (Id*)pc + 1; \
211 pc += 2 * vecLen * sizeof(Id);
213 #define SYNC() m_pc = pc
215 //=============================================================================
216 // Miscellaneous helpers.
218 static inline Class
* frameStaticClass(ActRec
* fp
) {
220 return fp
->getThis()->getVMClass();
221 } else if (fp
->hasClass()) {
222 return fp
->getClass();
228 //=============================================================================
236 , m_nvTable(boost::in_place
<NameValueTable
>(
237 RuntimeOption::EvalVMInitialGlobalTableSize
))
239 TypedValue globalArray
;
240 globalArray
.m_type
= KindOfArray
;
241 globalArray
.m_data
.parr
=
242 new (request_arena()) GlobalNameValueTableWrapper(&*m_nvTable
);
243 globalArray
.m_data
.parr
->incRefCount();
244 m_nvTable
->set(StringData::GetStaticString("GLOBALS"), &globalArray
);
245 tvRefcountedDecRef(&globalArray
);
248 VarEnv::VarEnv(ActRec
* fp
, ExtraArgs
* eArgs
)
255 const Func
* func
= fp
->m_func
;
256 const Id numNames
= func
->numNamedLocals();
258 if (!numNames
) return;
260 m_nvTable
= boost::in_place
<NameValueTable
>(numNames
);
262 TypedValue
** origLocs
=
263 reinterpret_cast<TypedValue
**>(uintptr_t(this) + sizeof(VarEnv
));
264 TypedValue
* loc
= frame_local(fp
, 0);
265 for (Id i
= 0; i
< numNames
; ++i
, --loc
) {
266 assert(func
->lookupVarId(func
->localVarName(i
)) == (int)i
);
267 origLocs
[i
] = m_nvTable
->migrateSet(func
->localVarName(i
), loc
);
272 TRACE(3, "Destroying VarEnv %p [%s]\n",
274 isGlobalScope() ? "global scope" : "local scope");
275 assert(m_restoreLocations
.empty());
277 if (!isGlobalScope()) {
278 if (LIKELY(!m_malloced
)) {
279 varenv_arena().endFrame();
284 * When detaching the global scope, we leak any live objects (and
285 * let the smart allocator clean them up). This is because we're
286 * not supposed to run destructors for objects that are live at
287 * the end of a request.
// Byte size for a lazily-attached VarEnv allocation: the VarEnv itself plus
// one TypedValue* slot per named local of fp's function. Those trailing
// slots record the locals' original locations (the VarEnv ctor addresses
// them via `uintptr_t(this) + sizeof(VarEnv)`).
293 size_t VarEnv::getObjectSz(ActRec
* fp
) {
294 return sizeof(VarEnv
) + sizeof(TypedValue
*) * fp
->m_func
->numNamedLocals();
// Creates a lazily-attached VarEnv for fp in the per-request varenv arena
// (freed wholesale at frame/request end, not via free()).
// NOTE(review): original lines 303-305 are missing from this extraction --
// presumably `return ret;` and the closing brace.
297 VarEnv
* VarEnv::createLocalOnStack(ActRec
* fp
) {
298 auto& va
= varenv_arena();
// Arena allocation sized for the VarEnv plus its trailing TypedValue* slots.
300 void* mem
= va
.alloc(getObjectSz(fp
));
// Placement-construct over the arena memory, adopting fp's extra args.
301 VarEnv
* ret
= new (mem
) VarEnv(fp
, fp
->getExtraArgs());
302 TRACE(3, "Creating lazily attached VarEnv %p on stack\n", mem
);
// Heap-allocated variant of createLocalOnStack: same layout and placement
// construction, but backed by malloc and tagged m_malloced so that
// VarEnv::destroy knows to call free().
// NOTE(review): original lines 311-313 are missing from this extraction --
// presumably `return ret;` and the closing brace.
306 VarEnv
* VarEnv::createLocalOnHeap(ActRec
* fp
) {
307 void* mem
= malloc(getObjectSz(fp
));
308 VarEnv
* ret
= new (mem
) VarEnv(fp
, fp
->getExtraArgs());
309 TRACE(3, "Creating lazily attached VarEnv %p on heap\n", mem
);
// Mark for free() on destroy (arena-backed VarEnvs skip this).
310 ret
->m_malloced
= true;
// Creates THE global-scope VarEnv for the request, allocated in the request
// arena, and publishes it on g_vmContext. Must only run once per request
// (asserted below).
// NOTE(review): original lines 321-322 are missing from this extraction --
// presumably `return ret;` and the closing brace.
314 VarEnv
* VarEnv::createGlobal() {
315 assert(!g_vmContext
->m_globalVarEnv
);
317 VarEnv
* ret
= new (request_arena()) VarEnv();
318 TRACE(3, "Creating VarEnv %p [global scope]\n", ret
);
319 ret
->m_global
= true;
320 g_vmContext
->m_globalVarEnv
= ret
;
// Destroys a VarEnv, releasing its memory with free() only when it was
// heap-allocated (createLocalOnHeap); arena-backed VarEnvs are reclaimed by
// their arena instead. The malloced flag is read BEFORE destruction so it
// survives the (missing) destructor call.
// NOTE(review): original line 326 is missing from this extraction --
// presumably the explicit destructor invocation; confirm against full file.
324 void VarEnv::destroy(VarEnv
* ve
) {
325 bool malloced
= ve
->m_malloced
;
327 if (UNLIKELY(malloced
)) free(ve
);
330 void VarEnv::attach(ActRec
* fp
) {
331 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
333 isGlobalScope() ? "global scope" : "local scope",
334 int(fp
->m_func
->numNamedLocals()), fp
);
335 assert(m_depth
== 0 || fp
->arGetSfp() == m_cfp
||
336 (fp
->arGetSfp() == fp
&& g_vmContext
->isNested()));
340 // Overlay fp's locals, if it has any.
342 const Func
* func
= fp
->m_func
;
343 const Id numNames
= func
->numNamedLocals();
348 m_nvTable
= boost::in_place
<NameValueTable
>(numNames
);
351 TypedValue
** origLocs
= new (varenv_arena()) TypedValue
*[
352 func
->numNamedLocals()];
353 TypedValue
* loc
= frame_local(fp
, 0);
354 for (Id i
= 0; i
< numNames
; ++i
, --loc
) {
355 assert(func
->lookupVarId(func
->localVarName(i
)) == (int)i
);
356 origLocs
[i
] = m_nvTable
->migrate(func
->localVarName(i
), loc
);
358 m_restoreLocations
.push_back(origLocs
);
361 void VarEnv::detach(ActRec
* fp
) {
362 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
364 isGlobalScope() ? "global scope" : "local scope",
369 // Merge/remove fp's overlaid locals, if it had any.
370 const Func
* func
= fp
->m_func
;
371 if (Id
const numLocals
= func
->numNamedLocals()) {
373 * In the case of a lazily attached VarEnv, we have our locations
374 * for the first (lazy) attach stored immediately following the
375 * VarEnv in memory. In this case m_restoreLocations will be empty.
377 assert((!isGlobalScope() && m_depth
== 1) == m_restoreLocations
.empty());
378 TypedValue
** origLocs
=
379 !m_restoreLocations
.empty()
380 ? m_restoreLocations
.back()
381 : reinterpret_cast<TypedValue
**>(uintptr_t(this) + sizeof(VarEnv
));
383 for (Id i
= 0; i
< numLocals
; i
++) {
384 m_nvTable
->resettle(func
->localVarName(i
), origLocs
[i
]);
386 if (!m_restoreLocations
.empty()) {
387 m_restoreLocations
.pop_back();
391 VMExecutionContext
* context
= g_vmContext
;
392 m_cfp
= context
->getPrevVMState(fp
);
396 // don't free global varEnv
397 if (context
->m_globalVarEnv
!= this) {
398 assert(!isGlobalScope());
404 // This helper is creating a NVT because of dynamic variable accesses,
405 // even though we're already attached to a frame and it had no named
407 void VarEnv::ensureNvt() {
408 const size_t kLazyNvtSize
= 3;
410 m_nvTable
= boost::in_place
<NameValueTable
>(kLazyNvtSize
);
414 void VarEnv::set(const StringData
* name
, TypedValue
* tv
) {
416 m_nvTable
->set(name
, tv
);
419 void VarEnv::bind(const StringData
* name
, TypedValue
* tv
) {
421 m_nvTable
->bind(name
, tv
);
424 void VarEnv::setWithRef(const StringData
* name
, TypedValue
* tv
) {
425 if (tv
->m_type
== KindOfRef
) {
432 TypedValue
* VarEnv::lookup(const StringData
* name
) {
436 return m_nvTable
->lookup(name
);
439 TypedValue
* VarEnv::lookupAdd(const StringData
* name
) {
441 return m_nvTable
->lookupAdd(name
);
444 TypedValue
* VarEnv::lookupRawPointer(const StringData
* name
) {
446 return m_nvTable
->lookupRawPointer(name
);
449 TypedValue
* VarEnv::lookupAddRawPointer(const StringData
* name
) {
451 return m_nvTable
->lookupAddRawPointer(name
);
// Unsets `name` in the name/value table. If no table has been materialized
// yet there is trivially nothing to unset, so report success immediately.
// NOTE(review): original line 457 is missing from this extraction --
// presumably the final `return true;`.
454 bool VarEnv::unset(const StringData
* name
) {
455 if (!m_nvTable
) return true;
456 m_nvTable
->unset(name
);
460 Array
VarEnv::getDefinedVariables() const {
461 Array ret
= Array::Create();
463 if (!m_nvTable
) return ret
;
465 NameValueTable::Iterator
iter(&*m_nvTable
);
466 for (; iter
.valid(); iter
.next()) {
467 const StringData
* sd
= iter
.curKey();
468 const TypedValue
* tv
= iter
.curVal();
469 if (tvAsCVarRef(tv
).isReferenced()) {
470 ret
.setRef(StrNR(sd
).asString(), tvAsCVarRef(tv
));
472 ret
.add(StrNR(sd
).asString(), tvAsCVarRef(tv
));
479 TypedValue
* VarEnv::getExtraArg(unsigned argInd
) const {
480 return m_extraArgs
->getExtraArg(argInd
);
483 //=============================================================================
485 ExtraArgs::ExtraArgs() {}
486 ExtraArgs::~ExtraArgs() {}
// Allocates raw request-local (smart_malloc) storage sized for an ExtraArgs
// header plus `nargs` TypedValue slots; callers placement-new an ExtraArgs
// into it.
// NOTE(review): the closing brace (original line 490) is missing from this
// extraction.
488 void* ExtraArgs::allocMem(unsigned nargs
) {
489 return smart_malloc(sizeof(TypedValue
) * nargs
+ sizeof(ExtraArgs
));
492 ExtraArgs
* ExtraArgs::allocateCopy(TypedValue
* args
, unsigned nargs
) {
493 void* mem
= allocMem(nargs
);
494 ExtraArgs
* ea
= new (mem
) ExtraArgs();
497 * The stack grows downward, so the args in memory are "backward"; i.e. the
498 * leftmost (in PHP) extra arg is highest in memory.
500 std::reverse_copy(args
, args
+ nargs
, &ea
->m_extraArgs
[0]);
// Allocates an ExtraArgs with room for `nargs` slots but leaves the slots
// uninitialized; the caller is responsible for filling them.
// NOTE(review): the closing brace (original line 507) is missing from this
// extraction.
504 ExtraArgs
* ExtraArgs::allocateUninit(unsigned nargs
) {
505 void* mem
= ExtraArgs::allocMem(nargs
);
506 return new (mem
) ExtraArgs();
509 void ExtraArgs::deallocate(ExtraArgs
* ea
, unsigned nargs
) {
512 for (unsigned i
= 0; i
< nargs
; ++i
) {
513 tvRefcountedDecRef(ea
->m_extraArgs
+ i
);
519 void ExtraArgs::deallocate(ActRec
* ar
) {
520 const int numExtra
= ar
->numArgs() - ar
->m_func
->numParams();
521 deallocate(ar
->getExtraArgs(), numExtra
);
// Returns a mutable pointer to extra-arg slot `argInd`. const_cast is used
// because the method is const while callers expect a writable TypedValue*.
// No bounds check is performed here.
// NOTE(review): the closing brace (original line 526) is missing from this
// extraction.
524 TypedValue
* ExtraArgs::getExtraArg(unsigned argInd
) const {
525 return const_cast<TypedValue
*>(&m_extraArgs
[argInd
]);
528 //=============================================================================
531 // Store actual stack elements array in a thread-local in order to amortize the
532 // cost of allocation.
535 StackElms() : m_elms(nullptr) {}
540 if (m_elms
== nullptr) {
541 // RuntimeOption::EvalVMStackElms-sized and -aligned.
542 size_t algnSz
= RuntimeOption::EvalVMStackElms
* sizeof(TypedValue
);
543 if (posix_memalign((void**)&m_elms
, algnSz
, algnSz
) != 0) {
544 throw std::runtime_error(
545 std::string("VM stack initialization failed: ") + strerror(errno
));
551 if (m_elms
!= nullptr) {
559 IMPLEMENT_THREAD_LOCAL(StackElms
, t_se
);
561 const int Stack::sSurprisePageSize
= sysconf(_SC_PAGESIZE
);
562 // We reserve the bottom page of each stack for use as the surprise
563 // page, so the minimum useful stack size is the next power of two.
564 const uint
Stack::sMinStackElms
= 2 * sSurprisePageSize
/ sizeof(TypedValue
);
566 void Stack::ValidateStackSize() {
567 if (RuntimeOption::EvalVMStackElms
< sMinStackElms
) {
568 throw std::runtime_error(str(
569 boost::format("VM stack size of 0x%llx is below the minimum of 0x%x")
570 % RuntimeOption::EvalVMStackElms
573 if (!Util::isPowerOfTwo(RuntimeOption::EvalVMStackElms
)) {
574 throw std::runtime_error(str(
575 boost::format("VM stack size of 0x%llx is not a power of 2")
576 % RuntimeOption::EvalVMStackElms
));
581 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
590 if (Transl::trustSigSegv
) {
591 mprotect(m_elms
, sizeof(void*), PROT_NONE
);
597 if (Transl::trustSigSegv
) {
598 mprotect(m_elms
, sizeof(void*), PROT_READ
| PROT_WRITE
);
603 Stack::requestInit() {
604 m_elms
= t_se
->elms();
605 if (Transl::trustSigSegv
) {
606 RequestInjectionData
& data
= ThreadInfo::s_threadInfo
->m_reqInjectionData
;
607 Lock
l(data
.surpriseLock
);
608 assert(data
.surprisePage
== nullptr);
609 data
.surprisePage
= m_elms
;
611 // Burn one element of the stack, to satisfy the constraint that
612 // valid m_top values always have the same high-order (>
613 // log(RuntimeOption::EvalVMStackElms)) bits.
614 m_top
= m_base
= m_elms
+ RuntimeOption::EvalVMStackElms
- 1;
616 // Because of the surprise page at the bottom of the stack we lose an
617 // additional 256 elements which must be taken into account when checking for
619 UNUSED
size_t maxelms
=
620 RuntimeOption::EvalVMStackElms
- sSurprisePageSize
/ sizeof(TypedValue
);
621 assert(!wouldOverflow(maxelms
- 1));
622 assert(wouldOverflow(maxelms
));
624 // Reset permissions on our stack's surprise page
629 Stack::requestExit() {
630 if (m_elms
!= nullptr) {
631 if (Transl::trustSigSegv
) {
632 RequestInjectionData
& data
= ThreadInfo::s_threadInfo
->m_reqInjectionData
;
633 Lock
l(data
.surpriseLock
);
634 assert(data
.surprisePage
== m_elms
);
636 data
.surprisePage
= nullptr;
642 void flush_evaluation_stack() {
643 if (g_context
.isNull()) {
644 // For RPCRequestHandler threads, the ExecutionContext can stay alive
645 // across requests, and hold references to the VM stack, and
646 // the TargetCache needs to keep track of which classes are live etc
647 // So only flush the VM stack and the target cache if the execution
650 if (!t_se
.isNull()) {
653 Transl::TargetCache::flush();
657 static std::string
toStringElm(const TypedValue
* tv
) {
658 std::ostringstream os
;
660 if (tv
->m_type
< MinDataType
|| tv
->m_type
> MaxNumDataTypes
) {
661 os
<< " ??? type " << tv
->m_type
<< "\n";
665 assert(tv
->m_type
>= MinDataType
&& tv
->m_type
< MaxNumDataTypes
);
666 if (IS_REFCOUNTED_TYPE(tv
->m_type
) && tv
->m_data
.pref
->_count
<= 0) {
667 // OK in the invoking frame when running a destructor.
668 os
<< " ??? inner_count " << tv
->m_data
.pref
->_count
<< " ";
672 switch (tv
->m_type
) {
675 os
<< "@" << tv
->m_data
.pref
;
676 os
<< toStringElm(tv
->m_data
.pref
->tv());
687 switch (tv
->m_type
) {
695 os
<< (tv
->m_data
.num
? "True" : "False");
698 os
<< "0x" << std::hex
<< tv
->m_data
.num
<< std::dec
;
701 os
<< tv
->m_data
.dbl
;
703 case KindOfStaticString
:
706 int len
= tv
->m_data
.pstr
->size();
707 bool truncated
= false;
712 os
<< tv
->m_data
.pstr
713 << "c(" << tv
->m_data
.pstr
->getCount() << ")"
715 << Util::escapeStringForCPP(tv
->m_data
.pstr
->data(), len
)
716 << "\"" << (truncated
? "..." : "");
720 assert(tv
->m_data
.parr
->getCount() > 0);
721 os
<< tv
->m_data
.parr
722 << "c(" << tv
->m_data
.parr
->getCount() << ")"
726 assert(tv
->m_data
.pobj
->getCount() > 0);
727 os
<< tv
->m_data
.pobj
728 << "c(" << tv
->m_data
.pobj
->getCount() << ")"
730 << tvAsCVarRef(tv
).asCObjRef().get()->o_getClassName().get()->data()
736 os
<< tv
->m_data
.pcls
737 << ":" << tv
->m_data
.pcls
->name()->data();
747 static std::string
toStringIter(const Iter
* it
, bool itRef
) {
748 if (itRef
) return "I:MutableArray";
750 // TODO(#2458166): it might be a CufIter, but we're just lucky that
751 // the bit pattern for the CufIter is going to have a 0 in
752 // getIterType for now.
753 switch (it
->arr().getIterType()) {
754 case ArrayIter::TypeUndefined
:
755 return "I:Undefined";
756 case ArrayIter::TypeArray
:
758 case ArrayIter::TypeIterator
:
765 void Stack::toStringFrame(std::ostream
& os
, const ActRec
* fp
,
766 int offset
, const TypedValue
* ftop
,
767 const string
& prefix
) const {
770 // Use depth-first recursion to output the most deeply nested stack frame
774 TypedValue
* prevStackTop
= nullptr;
775 ActRec
* prevFp
= g_vmContext
->getPrevVMState(fp
, &prevPc
, &prevStackTop
);
776 if (prevFp
!= nullptr) {
777 toStringFrame(os
, prevFp
, prevPc
, prevStackTop
, prefix
);
782 const Func
* func
= fp
->m_func
;
785 string
funcName(func
->fullName()->data());
786 os
<< "{func:" << funcName
787 << ",soff:" << fp
->m_soff
788 << ",this:0x" << std::hex
<< (fp
->hasThis() ? fp
->getThis() : nullptr)
790 TypedValue
* tv
= (TypedValue
*)fp
;
793 if (func
->numLocals() > 0) {
795 int n
= func
->numLocals();
796 for (int i
= 0; i
< n
; i
++, tv
--) {
800 os
<< toStringElm(tv
);
805 assert(!func
->info() || func
->numIterators() == 0);
806 if (func
->numIterators() > 0) {
808 Iter
* it
= &((Iter
*)&tv
[1])[-1];
809 for (int i
= 0; i
< func
->numIterators(); i
++, it
--) {
814 if (func
->checkIterScope(offset
, i
, itRef
)) {
815 os
<< toStringIter(it
, itRef
);
823 std::vector
<std::string
> stackElems
;
826 [&](const ActRec
* ar
) {
827 stackElems
.push_back(
828 folly::format("{{func:{}}}", ar
->m_func
->fullName()->data()).str()
831 [&](const TypedValue
* tv
) {
832 stackElems
.push_back(toStringElm(tv
));
835 std::reverse(stackElems
.begin(), stackElems
.end());
836 os
<< ' ' << folly::join(' ', stackElems
);
841 string
Stack::toString(const ActRec
* fp
, int offset
,
842 const string prefix
/* = "" */) const {
843 // The only way to figure out which stack elements are activation records is
844 // to follow the frame chain. However, the goal for each stack frame is to
845 // print stack fragments from deepest to shallowest -- a then b in the
846 // following example:
848 // {func:foo,soff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
849 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
851 // Use depth-first recursion to get the output order correct.
853 std::ostringstream os
;
854 os
<< prefix
<< "=== Stack at " << curUnit()->filepath()->data() << ":" <<
855 curUnit()->getLineNumber(curUnit()->offsetOf(vmpc())) << " func " <<
856 curFunc()->fullName()->data() << " ===\n";
858 toStringFrame(os
, fp
, offset
, m_top
, prefix
);
// Branch-free overflow probe mirroring the JIT's inline stack check: mask
// m_top down to its offset within the (power-of-two sized, aligned) stack,
// then see whether numCells plus the reserved surprise page would run past
// the bottom.
// NOTE(review): original line 873 is missing from this extraction --
// presumably the final `return diff < 0;` (sign of `diff` is the verdict);
// confirm against the full file.
863 bool Stack::wouldOverflow(int numCells
) const {
864 // The funny approach here is to validate the translator's assembly
865 // technique. We've aligned and sized the stack so that the high order
866 // bits of valid cells are all the same. In the translator, numCells
867 // can be hardcoded, and m_top is wired into a register,
868 // so the expression requires no loads.
869 intptr_t truncatedTop
= intptr_t(m_top
) / sizeof(TypedValue
);
// EvalVMStackElms is a power of two (enforced by ValidateStackSize), so
// this mask yields the cell index of m_top within the stack.
870 truncatedTop
&= RuntimeOption::EvalVMStackElms
- 1;
// Cells remaining after the request and after the surprise-page reserve.
871 intptr_t diff
= truncatedTop
- numCells
-
872 sSurprisePageSize
/ sizeof(TypedValue
);
// Computes the evaluation-stack base for a non-generator frame: the ActRec
// sits highest, with the function's locals and then its iterators laid out
// below it, so the base is fp minus the byte sizes of both regions.
// Generators are excluded here (asserted); they use generatorStackBase.
// NOTE(review): the closing brace (original line 882) is missing from this
// extraction.
876 TypedValue
* Stack::frameStackBase(const ActRec
* fp
) {
877 const Func
* func
= fp
->m_func
;
878 assert(!func
->isGenerator());
879 return (TypedValue
*)((uintptr_t)fp
880 - (uintptr_t)(func
->numLocals()) * sizeof(TypedValue
)
881 - (uintptr_t)(func
->numIterators() * sizeof(Iter
)));
884 TypedValue
* Stack::generatorStackBase(const ActRec
* fp
) {
885 assert(fp
->m_func
->isGenerator());
886 VMExecutionContext
* context
= g_vmContext
;
887 ActRec
* sfp
= fp
->arGetSfp();
889 // In the reentrant case, we can consult the savedVM state. We simply
890 // use the top of stack of the previous VM frame (since the ActRec,
891 // locals, and iters for this frame do not reside on the VM stack).
892 return context
->m_nestedVMs
.back().m_savedState
.sp
;
894 // In the non-reentrant case, we know generators are always called from a
895 // function with an empty stack. So we find the caller's FP, compensate
896 // for its locals, and then we've found the base of the generator's stack.
897 return (TypedValue
*)sfp
- sfp
->m_func
->numSlotsInFrame();
901 __thread RequestArenaStorage s_requestArenaStorage
;
902 __thread VarEnvArenaStorage s_varEnvArenaStorage
;
905 //=============================================================================
908 using namespace HPHP
;
909 using namespace HPHP::MethodLookup
;
// Returns the frame that called `ar`, following the same saved-rbp /
// native-stack-range discrimination as ActRec::arGetSfp; when the saved
// pointer is a native frame (i.e. `ar` is the outermost frame of a nested
// VM invocation), falls back to the frame recorded for the most recent
// nested VM state.
// NOTE(review): original lines 916-917 and 919-920 are missing from this
// extraction -- presumably a closing brace and a final `return nullptr;`
// (or equivalent) fallback; confirm against the full file.
911 ActRec
* VMExecutionContext::getOuterVMFrame(const ActRec
* ar
) {
912 ActRec
* prevFrame
= (ActRec
*)ar
->m_savedRbp
;
// Unsigned-subtract range check: true when prevFrame lies outside the
// native C++ stack [s_stackLimit, s_stackLimit + s_stackSize).
913 if (LIKELY(((uintptr_t)prevFrame
- Util::s_stackLimit
) >=
914 Util::s_stackSize
)) {
915 if (LIKELY(prevFrame
!= nullptr)) return prevFrame
;
// Reentrant case: resume from the saved frame of the innermost nested VM.
918 if (LIKELY(!m_nestedVMs
.empty())) return m_nestedVMs
.back().m_savedState
.fp
;
922 TypedValue
* VMExecutionContext::lookupClsCns(const NamedEntity
* ne
,
923 const StringData
* cls
,
924 const StringData
* cns
) {
925 Class
* class_
= Unit::loadClass(ne
, cls
);
926 if (class_
== nullptr) {
927 raise_error(Strings::UNKNOWN_CLASS
, cls
->data());
929 TypedValue
* clsCns
= class_
->clsCnsGet(cns
);
930 if (clsCns
== nullptr) {
931 raise_error("Couldn't find constant %s::%s",
932 cls
->data(), cns
->data());
937 TypedValue
* VMExecutionContext::lookupClsCns(const StringData
* cls
,
938 const StringData
* cns
) {
939 return lookupClsCns(Unit::GetNamedEntity(cls
), cls
, cns
);
942 // Look up the method specified by methodName from the class specified by cls
943 // and enforce accessibility. Accessibility checks depend on the relationship
944 // between the class that first declared the method (baseClass) and the context
947 // If there are multiple accessible methods with the specified name declared in
948 // cls and ancestors of cls, the method from the most derived class will be
949 // returned, except if we are doing an ObjMethod call ("$obj->foo()") and there
950 // is an accessible private method, in which case the accessible private method
953 // Accessibility rules:
955 // | baseClass/ctx relationship | public | protected | private |
956 // +----------------------------+--------+-----------+---------+
957 // | anon/unrelated | yes | no | no |
958 // | baseClass == ctx | yes | yes | yes |
959 // | baseClass derived from ctx | yes | yes | no |
960 // | ctx derived from baseClass | yes | yes | no |
961 // +----------------------------+--------+-----------+---------+
963 const Func
* VMExecutionContext::lookupMethodCtx(const Class
* cls
,
964 const StringData
* methodName
,
967 bool raise
/* = false */) {
969 if (callType
== CallType::CtorMethod
) {
970 assert(methodName
== nullptr);
971 method
= cls
->getCtor();
973 assert(callType
== CallType::ObjMethod
|| callType
== CallType::ClsMethod
);
974 assert(methodName
!= nullptr);
975 method
= cls
->lookupMethod(methodName
);
977 static StringData
* sd__construct
978 = StringData::GetStaticString("__construct");
979 if (UNLIKELY(methodName
== sd__construct
)) {
980 // We were looking up __construct and failed to find it. Fall back
981 // to old-style constructor: same as class name.
982 method
= cls
->getCtor();
983 if (!Func::isSpecial(method
->name())) break;
986 raise_error("Call to undefined method %s::%s from %s%s",
989 ctx
? "context " : "anonymous context",
990 ctx
? ctx
->name()->data() : "");
996 bool accessible
= true;
997 // If we found a protected or private method, we need to do some
998 // accessibility checks.
999 if ((method
->attrs() & (AttrProtected
|AttrPrivate
)) &&
1000 !g_vmContext
->getDebuggerBypassCheck()) {
1001 Class
* baseClass
= method
->baseCls();
1003 // If the context class is the same as the class that first
1004 // declared this method, then we know we have the right method
1005 // and we can stop here.
1006 if (ctx
== baseClass
) {
1009 // The anonymous context cannot access protected or private methods,
1010 // so we can fail fast here.
1011 if (ctx
== nullptr) {
1013 raise_error("Call to %s method %s::%s from anonymous context",
1014 (method
->attrs() & AttrPrivate
) ? "private" : "protected",
1015 cls
->name()->data(),
1016 method
->name()->data());
1021 if (method
->attrs() & AttrPrivate
) {
1022 // The context class is not the same as the class that declared
1023 // this private method, so this private method is not accessible.
1024 // We need to keep going because the context class may define a
1025 // private method with this name.
1028 // If the context class is derived from the class that first
1029 // declared this protected method, then we know this method is
1030 // accessible and we know the context class cannot have a private
1031 // method with the same name, so we're done.
1032 if (ctx
->classof(baseClass
)) {
1035 if (!baseClass
->classof(ctx
)) {
1036 // The context class is not the same, an ancestor, or a descendent
1037 // of the class that first declared this protected method, so
1038 // this method is not accessible. Because the context class is
1039 // not the same or an ancestor of the class which first declared
1040 // the method, we know that the context class is not the same
1041 // or an ancestor of cls, and therefore we don't need to check
1042 // if the context class declares a private method with this name,
1043 // so we can fail fast here.
1045 raise_error("Call to protected method %s::%s from context %s",
1046 cls
->name()->data(),
1047 method
->name()->data(),
1048 ctx
->name()->data());
1052 // We now know this protected method is accessible, but we need to
1053 // keep going because the context class may define a private method
1055 assert(accessible
&& baseClass
->classof(ctx
));
1058 // If this is an ObjMethod call ("$obj->foo()") AND there is an ancestor
1059 // of cls that declares a private method with this name AND the context
1060 // class is an ancestor of cls, check if the context class declares a
1061 // private method with this name.
1062 if (method
->hasPrivateAncestor() && callType
== CallType::ObjMethod
&&
1063 ctx
&& cls
->classof(ctx
)) {
1064 const Func
* ctxMethod
= ctx
->lookupMethod(methodName
);
1065 if (ctxMethod
&& ctxMethod
->cls() == ctx
&&
1066 (ctxMethod
->attrs() & AttrPrivate
)) {
1067 // For ObjMethod calls a private method from the context class
1068 // trumps any other method we may have found.
1076 raise_error("Call to private method %s::%s from %s%s",
1077 method
->baseCls()->name()->data(),
1078 method
->name()->data(),
1079 ctx
? "context " : "anonymous context",
1080 ctx
? ctx
->name()->data() : "");
1085 LookupResult
VMExecutionContext::lookupObjMethod(const Func
*& f
,
1087 const StringData
* methodName
,
1088 bool raise
/* = false */) {
1089 Class
* ctx
= arGetContextClass(getFP());
1090 f
= lookupMethodCtx(cls
, methodName
, ctx
, CallType::ObjMethod
, false);
1092 f
= cls
->lookupMethod(s___call
.get());
1095 // Throw a fatal error
1096 lookupMethodCtx(cls
, methodName
, ctx
, CallType::ObjMethod
, true);
1098 return LookupResult::MethodNotFound
;
1100 return LookupResult::MagicCallFound
;
1102 if (f
->attrs() & AttrStatic
&& !f
->isClosureBody()) {
1103 return LookupResult::MethodFoundNoThis
;
1105 return LookupResult::MethodFoundWithThis
;
1109 VMExecutionContext::lookupClsMethod(const Func
*& f
,
1111 const StringData
* methodName
,
1114 bool raise
/* = false */) {
1115 Class
* ctx
= arGetContextClass(vmfp
);
1116 f
= lookupMethodCtx(cls
, methodName
, ctx
, CallType::ClsMethod
, false);
1118 if (obj
&& obj
->instanceof(cls
)) {
1119 f
= obj
->getVMClass()->lookupMethod(s___call
.get());
1122 f
= cls
->lookupMethod(s___callStatic
.get());
1125 // Throw a fatal errpr
1126 lookupMethodCtx(cls
, methodName
, ctx
, CallType::ClsMethod
, true);
1128 return LookupResult::MethodNotFound
;
1132 assert(f
->attrs() & AttrStatic
);
1133 return LookupResult::MagicCallStaticFound
;
1137 // __call cannot be static, this should be enforced by semantic
1138 // checks defClass time or earlier
1139 assert(!(f
->attrs() & AttrStatic
));
1140 return LookupResult::MagicCallFound
;
1142 if (obj
&& !(f
->attrs() & AttrStatic
) && obj
->instanceof(cls
)) {
1143 return LookupResult::MethodFoundWithThis
;
1145 return LookupResult::MethodFoundNoThis
;
1148 LookupResult
VMExecutionContext::lookupCtorMethod(const Func
*& f
,
1150 bool raise
/* = false */) {
1152 if (!(f
->attrs() & AttrPublic
)) {
1153 Class
* ctx
= arGetContextClass(getFP());
1154 f
= lookupMethodCtx(cls
, nullptr, ctx
, CallType::CtorMethod
, raise
);
1156 // If raise was true than lookupMethodCtx should have thrown,
1157 // so we should only be able to get here if raise was false
1159 return LookupResult::MethodNotFound
;
1162 return LookupResult::MethodFoundWithThis
;
1165 ObjectData
* VMExecutionContext::createObject(StringData
* clsName
,
1167 bool init
/* = true */) {
1168 Class
* class_
= Unit::loadClass(clsName
);
1169 if (class_
== nullptr) {
1170 throw_missing_class(clsName
->data());
1173 o
= newInstance(class_
);
1177 invokeFunc(&ret
, class_
->getCtor(), params
, o
.get());
1178 tvRefcountedDecRef(&ret
);
1181 ObjectData
* ret
= o
.detach();
1186 ObjectData
* VMExecutionContext::createObjectOnly(StringData
* clsName
) {
1187 return createObject(clsName
, null_array
, false);
1190 ActRec
* VMExecutionContext::getStackFrame() {
1195 ObjectData
* VMExecutionContext::getThis() {
1197 ActRec
* fp
= getFP();
1198 if (fp
->skipFrame()) {
1199 fp
= getPrevVMState(fp
);
1200 if (!fp
) return nullptr;
1202 if (fp
->hasThis()) {
1203 return fp
->getThis();
1208 Class
* VMExecutionContext::getContextClass() {
1210 ActRec
* ar
= getFP();
1211 assert(ar
!= nullptr);
1212 if (ar
->skipFrame()) {
1213 ar
= getPrevVMState(ar
);
1214 if (!ar
) return nullptr;
1216 return ar
->m_func
->cls();
1219 Class
* VMExecutionContext::getParentContextClass() {
1220 if (Class
* ctx
= getContextClass()) {
1221 return ctx
->parent();
1226 CStrRef
VMExecutionContext::getContainingFileName() {
1228 ActRec
* ar
= getFP();
1229 if (ar
== nullptr) return empty_string
;
1230 if (ar
->skipFrame()) {
1231 ar
= getPrevVMState(ar
);
1232 if (ar
== nullptr) return empty_string
;
1234 Unit
* unit
= ar
->m_func
->unit();
1235 return unit
->filepathRef();
1238 int VMExecutionContext::getLine() {
1240 ActRec
* ar
= getFP();
1241 Unit
* unit
= ar
? ar
->m_func
->unit() : nullptr;
1242 Offset pc
= unit
? pcOff() : 0;
1243 if (ar
== nullptr) return -1;
1244 if (ar
->skipFrame()) {
1245 ar
= getPrevVMState(ar
, &pc
);
1247 if (ar
== nullptr || (unit
= ar
->m_func
->unit()) == nullptr) return -1;
1248 return unit
->getLineNumber(pc
);
1251 Array
VMExecutionContext::getCallerInfo() {
1253 Array result
= Array::Create();
1254 ActRec
* ar
= getFP();
1255 if (ar
->skipFrame()) {
1256 ar
= getPrevVMState(ar
);
1258 while (ar
->m_func
->name()->isame(s_call_user_func
.get())
1259 || ar
->m_func
->name()->isame(s_call_user_func_array
.get())) {
1260 ar
= getPrevVMState(ar
);
1261 if (ar
== nullptr) {
1267 ar
= getPrevVMState(ar
, &pc
);
1268 while (ar
!= nullptr) {
1269 if (!ar
->m_func
->name()->isame(s_call_user_func
.get())
1270 && !ar
->m_func
->name()->isame(s_call_user_func_array
.get())) {
1271 Unit
* unit
= ar
->m_func
->unit();
1273 if ((lineNumber
= unit
->getLineNumber(pc
)) != -1) {
1274 result
.set(s_file
, unit
->filepath()->data(), true);
1275 result
.set(s_line
, lineNumber
);
1279 ar
= getPrevVMState(ar
, &pc
);
1284 bool VMExecutionContext::renameFunction(const StringData
* oldName
,
1285 const StringData
* newName
) {
1286 return m_renamedFuncs
.rename(oldName
, newName
);
1289 bool VMExecutionContext::isFunctionRenameable(const StringData
* name
) {
1290 return m_renamedFuncs
.isFunctionRenameable(name
);
1293 void VMExecutionContext::addRenameableFunctions(ArrayData
* arr
) {
1294 m_renamedFuncs
.addRenameableFunctions(arr
);
1297 VarEnv
* VMExecutionContext::getVarEnv() {
1300 ActRec
* fp
= getFP();
1301 if (UNLIKELY(!fp
)) return NULL
;
1302 if (fp
->skipFrame()) {
1303 fp
= getPrevVMState(fp
);
1305 if (!fp
) return nullptr;
1306 assert(!fp
->hasInvName());
1307 if (!fp
->hasVarEnv()) {
1308 fp
->setVarEnv(VarEnv::createLocalOnStack(fp
));
1310 return fp
->m_varEnv
;
1313 void VMExecutionContext::setVar(StringData
* name
, TypedValue
* v
, bool ref
) {
1315 // setVar() should only be called after getVarEnv() has been called
1316 // to create a varEnv
1317 ActRec
*fp
= getFP();
1319 if (fp
->skipFrame()) {
1320 fp
= getPrevVMState(fp
);
1322 assert(!fp
->hasInvName());
1323 assert(!fp
->hasExtraArgs());
1324 assert(fp
->m_varEnv
!= nullptr);
1326 fp
->m_varEnv
->bind(name
, v
);
1328 fp
->m_varEnv
->set(name
, v
);
1332 Array
VMExecutionContext::getLocalDefinedVariables(int frame
) {
1334 ActRec
*fp
= getFP();
1335 for (; frame
> 0; --frame
) {
1337 fp
= getPrevVMState(fp
);
1340 return Array::Create();
1342 assert(!fp
->hasInvName());
1343 if (fp
->hasVarEnv()) {
1344 return fp
->m_varEnv
->getDefinedVariables();
1346 const Func
*func
= fp
->m_func
;
1347 auto numLocals
= func
->numNamedLocals();
1348 ArrayInit
ret(numLocals
);
1349 for (Id id
= 0; id
< numLocals
; ++id
) {
1350 TypedValue
* ptv
= frame_local(fp
, id
);
1351 if (ptv
->m_type
== KindOfUninit
) {
1354 Variant
name(func
->localVarName(id
));
1355 ret
.add(name
, tvAsVariant(ptv
));
1357 return ret
.toArray();
1360 void VMExecutionContext::shuffleMagicArgs(ActRec
* ar
) {
1361 // We need to put this where the first argument is
1362 StringData
* invName
= ar
->getInvName();
1363 int nargs
= ar
->numArgs();
1364 ar
->setVarEnv(nullptr);
1365 assert(!ar
->hasVarEnv() && !ar
->hasInvName());
1366 // We need to make an array containing all the arguments passed by the
1367 // caller and put it where the second argument is
1368 ArrayData
* argArray
= pack_args_into_array(ar
, nargs
);
1369 argArray
->incRefCount();
1370 // Remove the arguments from the stack
1371 for (int i
= 0; i
< nargs
; ++i
) {
1374 // Move invName to where the first argument belongs, no need
1375 // to incRef/decRef since we are transferring ownership
1376 m_stack
.pushStringNoRc(invName
);
1377 // Move argArray to where the second argument belongs. We've already
1378 // incReffed the array above so we don't need to do it here.
1379 m_stack
.pushArrayNoRc(argArray
);
1384 static inline void checkStack(Stack
& stk
, const Func
* f
) {
1385 ThreadInfo
* info
= ThreadInfo::s_threadInfo
.getNoCheck();
1386 // Check whether func's maximum stack usage would overflow the stack.
1387 // Both native and VM stack overflows are independently possible.
1388 if (!stack_in_bounds(info
) ||
1389 stk
.wouldOverflow(f
->maxStackCells() + kStackCheckPadding
)) {
1390 TRACE(1, "Maximum VM stack depth exceeded.\n");
1391 raise_error("Stack overflow");
1395 bool VMExecutionContext::prepareFuncEntry(ActRec
*ar
, PC
& pc
) {
1396 const Func
* func
= ar
->m_func
;
1397 Offset firstDVInitializer
= InvalidAbsoluteOffset
;
1398 bool raiseMissingArgumentWarnings
= false;
1399 int nparams
= func
->numParams();
1400 if (UNLIKELY(ar
->m_varEnv
!= nullptr)) {
1402 * m_varEnv != nullptr => we have a varEnv, extraArgs, or an invName.
1404 if (ar
->hasInvName()) {
1405 // shuffleMagicArgs deals with everything. no need for
1406 // further argument munging
1407 shuffleMagicArgs(ar
);
1408 } else if (ar
->hasVarEnv()) {
1410 if (!func
->isGenerator()) {
1411 assert(func
->isPseudoMain());
1412 pushLocalsAndIterators(func
);
1413 ar
->m_varEnv
->attach(ar
);
1415 pc
= func
->getEntry();
1416 // Nothing more to do; get out
1419 assert(ar
->hasExtraArgs());
1420 assert(func
->numParams() < ar
->numArgs());
1423 int nargs
= ar
->numArgs();
1424 if (nargs
!= nparams
) {
1425 if (nargs
< nparams
) {
1426 // Push uninitialized nulls for missing arguments. Some of them may end
1427 // up getting default-initialized, but regardless, we need to make space
1428 // for them on the stack.
1429 const Func::ParamInfoVec
& paramInfo
= func
->params();
1430 for (int i
= nargs
; i
< nparams
; ++i
) {
1431 m_stack
.pushUninit();
1432 Offset dvInitializer
= paramInfo
[i
].funcletOff();
1433 if (dvInitializer
== InvalidAbsoluteOffset
) {
1434 // We wait to raise warnings until after all the locals have been
1435 // initialized. This is important because things need to be in a
1436 // consistent state in case the user error handler throws.
1437 raiseMissingArgumentWarnings
= true;
1438 } else if (firstDVInitializer
== InvalidAbsoluteOffset
) {
1439 // This is the first unpassed arg with a default value, so
1440 // this is where we'll need to jump to.
1441 firstDVInitializer
= dvInitializer
;
1445 if (func
->attrs() & AttrMayUseVV
) {
1446 // Extra parameters must be moved off the stack.
1447 const int numExtras
= nargs
- nparams
;
1448 ar
->setExtraArgs(ExtraArgs::allocateCopy((TypedValue
*)ar
- nargs
,
1450 m_stack
.ndiscard(numExtras
);
1452 // The function we're calling is not marked as "MayUseVV",
1453 // so just discard the extra arguments
1454 int numExtras
= nargs
- nparams
;
1455 for (int i
= 0; i
< numExtras
; i
++) {
1458 ar
->setNumArgs(nparams
);
1464 int nlocals
= nparams
;
1465 if (UNLIKELY(func
->isClosureBody())) {
1466 int nuse
= init_closure(ar
, m_stack
.top());
1467 // init_closure doesn't move m_stack
1468 m_stack
.nalloc(nuse
);
1473 if (LIKELY(!func
->isGenerator())) {
1475 * we only get here from callAndResume
1476 * if we failed to get a translation for
1477 * a generator's prologue
1479 pushLocalsAndIterators(func
, nlocals
);
1483 if (firstDVInitializer
!= InvalidAbsoluteOffset
) {
1484 pc
= func
->unit()->entry() + firstDVInitializer
;
1486 pc
= func
->getEntry();
1488 // cppext functions/methods have their own logic for raising
1489 // warnings for missing arguments, so we only need to do this work
1490 // for non-cppext functions/methods
1491 if (raiseMissingArgumentWarnings
&& !func
->info()) {
1492 // need to sync m_pc to pc for backtraces/re-entry
1494 const Func::ParamInfoVec
& paramInfo
= func
->params();
1495 for (int i
= ar
->numArgs(); i
< nparams
; ++i
) {
1496 Offset dvInitializer
= paramInfo
[i
].funcletOff();
1497 if (dvInitializer
== InvalidAbsoluteOffset
) {
1498 const char* name
= func
->name()->data();
1500 raise_warning(Strings::MISSING_ARGUMENT
, name
, i
);
1502 raise_warning(Strings::MISSING_ARGUMENTS
, name
, nparams
, i
);
1510 void VMExecutionContext::syncGdbState() {
1511 if (RuntimeOption::EvalJit
&& !RuntimeOption::EvalJitNoGdb
) {
1512 tx()->getDebugInfo()->debugSync();
1516 void VMExecutionContext::enterVMPrologue(ActRec
* enterFnAr
) {
1518 Stats::inc(Stats::VMEnter
);
1519 if (ThreadInfo::s_threadInfo
->m_reqInjectionData
.getJit()) {
1520 int np
= enterFnAr
->m_func
->numParams();
1521 int na
= enterFnAr
->numArgs();
1522 if (na
> np
) na
= np
+ 1;
1523 Transl::TCA start
= enterFnAr
->m_func
->getPrologue(na
);
1524 tx()->enterTCAtProlog(enterFnAr
, start
);
1526 if (prepareFuncEntry(enterFnAr
, m_pc
)) {
1527 enterVMWork(enterFnAr
);
1532 void VMExecutionContext::enterVMWork(ActRec
* enterFnAr
) {
1533 Transl::TCA start
= nullptr;
1535 if (!EventHook::FunctionEnter(enterFnAr
, EventHook::NormalFunc
)) return;
1536 checkStack(m_stack
, enterFnAr
->m_func
);
1537 start
= enterFnAr
->m_func
->getFuncBody();
1539 Stats::inc(Stats::VMEnter
);
1540 if (ThreadInfo::s_threadInfo
->m_reqInjectionData
.getJit()) {
1541 (void) curUnit()->offsetOf(m_pc
); /* assert */
1544 tx()->enterTCAfterProlog(start
);
1546 SrcKey
sk(curFunc(), m_pc
);
1547 tx()->enterTCAtSrcKey(sk
);
1554 void VMExecutionContext::enterVM(TypedValue
* retval
, ActRec
* ar
) {
1555 DEBUG_ONLY
int faultDepth
= m_faults
.size();
1556 SCOPE_EXIT
{ assert(m_faults
.size() == faultDepth
); };
1559 ar
->m_savedRip
= reinterpret_cast<uintptr_t>(tx()->getCallToExit());
1560 assert(isReturnHelper(ar
->m_savedRip
));
1563 * When an exception is propagating, each nesting of the VM is
1564 * responsible for unwinding its portion of the execution stack, and
1565 * finding user handlers if it is a catchable exception.
1567 * This try/catch is where all this logic is centered. The actual
1568 * unwinding happens under exception_handler in unwind.cpp, which
1569 * returns a UnwindAction here to indicate what to do next.
1571 * Either we'll enter the VM loop again at a user error/fault
1572 * handler, or propagate the exception to a less-nested VM.
1579 if (m_fp
&& !ar
->m_varEnv
) {
1580 enterVMPrologue(ar
);
1581 } else if (prepareFuncEntry(ar
, m_pc
)) {
1588 // Everything succeeded with no exception---return to the previous
1589 // VM nesting level.
1590 *retval
= *m_stack
.topTV();
1595 always_assert(Transl::tl_regState
== Transl::VMRegState::CLEAN
);
1596 auto const action
= exception_handler();
1597 if (action
== UnwindAction::ResumeVM
) {
1600 always_assert(action
== UnwindAction::Propagate
);
1604 * Here we have to propagate an exception out of this VM's nesting
1608 if (g_vmContext
->m_nestedVMs
.empty()) {
1613 assert(m_faults
.size() > 0);
1614 Fault fault
= m_faults
.back();
1615 m_faults
.pop_back();
1617 switch (fault
.m_faultType
) {
1618 case Fault::Type::UserException
:
1620 Object obj
= fault
.m_userException
;
1621 fault
.m_userException
->decRefCount();
1624 case Fault::Type::CppException
:
1625 // throwException() will take care of deleting heap-allocated
1626 // exception object for us
1627 fault
.m_cppException
->throwException();
1634 void VMExecutionContext::reenterVM(TypedValue
* retval
,
1636 TypedValue
* savedSP
) {
1639 VMState savedVM
= { getPC(), getFP(), m_firstAR
, savedSP
};
1640 TRACE(3, "savedVM: %p %p %p %p\n", m_pc
, m_fp
, m_firstAR
, savedSP
);
1641 pushVMState(savedVM
, ar
);
1642 assert(m_nestedVMs
.size() >= 1);
1644 enterVM(retval
, ar
);
1650 TRACE(1, "Reentry: exit fp %p pc %p\n", m_fp
, m_pc
);
1653 void VMExecutionContext::invokeFunc(TypedValue
* retval
,
1656 ObjectData
* this_
/* = NULL */,
1657 Class
* cls
/* = NULL */,
1658 VarEnv
* varEnv
/* = NULL */,
1659 StringData
* invName
/* = NULL */,
1660 InvokeFlags flags
/* = InvokeNormal */) {
1663 // If this is a regular function, this_ and cls must be NULL
1664 assert(f
->preClass() || f
->isPseudoMain() || (!this_
&& !cls
));
1665 // If this is a method, either this_ or cls must be non-NULL
1666 assert(!f
->preClass() || (this_
|| cls
));
1667 // If this is a static method, this_ must be NULL
1668 assert(!(f
->attrs() & AttrStatic
&& !f
->isClosureBody()) ||
1670 // invName should only be non-NULL if we are calling __call or
1672 assert(!invName
|| f
->name()->isame(s___call
.get()) ||
1673 f
->name()->isame(s___callStatic
.get()));
1674 // If a variable environment is being inherited then params must be empty
1675 assert(!varEnv
|| params
.empty());
1679 bool isMagicCall
= (invName
!= nullptr);
1681 if (this_
!= nullptr) {
1682 this_
->incRefCount();
1684 Cell
* savedSP
= m_stack
.top();
1686 if (f
->numParams() > kStackCheckReenterPadding
- kNumActRecCells
) {
1687 checkStack(m_stack
, f
);
1690 if (flags
& InvokePseudoMain
) {
1691 assert(f
->isPseudoMain() && !params
.get());
1692 Unit
* toMerge
= f
->unit();
1694 if (toMerge
->isMergeOnly()) {
1695 *retval
= *toMerge
->getMainReturn();
1700 ActRec
* ar
= m_stack
.allocA();
1709 ar
->setThis(nullptr);
1714 ar
->initNumArgs(params
.size());
1716 ar
->setVarEnv(varEnv
);
1719 if (m_fp
== nullptr) {
1720 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
1721 f
->name()->data(), ar
);
1723 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
1724 f
->name()->data(), m_pc
, ar
,
1725 m_fp
->m_func
? m_fp
->m_func
->name()->data() : "unknownBuiltin", m_fp
);
1729 ArrayData
*arr
= params
.get();
1731 // Put the method name into the location of the first parameter. We
1732 // are transferring ownership, so no need to incRef/decRef here.
1733 m_stack
.pushStringNoRc(invName
);
1734 // Put array of arguments into the location of the second parameter
1735 m_stack
.pushArray(arr
);
1737 const int numParams
= f
->numParams();
1738 const int numExtraArgs
= arr
->size() - numParams
;
1739 ExtraArgs
* extraArgs
= nullptr;
1740 if (numExtraArgs
> 0 && (f
->attrs() & AttrMayUseVV
)) {
1741 extraArgs
= ExtraArgs::allocateUninit(numExtraArgs
);
1742 ar
->setExtraArgs(extraArgs
);
1745 for (ssize_t i
= arr
->iter_begin();
1746 i
!= ArrayData::invalid_index
;
1747 i
= arr
->iter_advance(i
), ++paramId
) {
1748 TypedValue
*from
= arr
->nvGetValueRef(i
);
1750 if (LIKELY(paramId
< numParams
)) {
1751 to
= m_stack
.allocTV();
1753 if (!(f
->attrs() & AttrMayUseVV
)) {
1754 // Discard extra arguments, since the function cannot
1755 // possibly use them.
1756 assert(extraArgs
== nullptr);
1757 ar
->setNumArgs(numParams
);
1760 assert(extraArgs
!= nullptr && numExtraArgs
> 0);
1761 // VarEnv expects the extra args to be in "reverse" order
1762 // (i.e. the last extra arg has the lowest address)
1763 to
= extraArgs
->getExtraArg(paramId
- numParams
);
1766 if (LIKELY(!f
->byRef(paramId
))) {
1767 if (to
->m_type
== KindOfRef
) {
1770 } else if (!(flags
& InvokeIgnoreByRefErrors
) &&
1771 (from
->m_type
!= KindOfRef
||
1772 from
->m_data
.pref
->_count
== 2)) {
1773 raise_warning("Parameter %d to %s() expected to be "
1774 "a reference, value given",
1775 paramId
+ 1, f
->fullName()->data());
1776 if (skipCufOnInvalidParams
) {
1778 int n
= paramId
>= numParams
? paramId
- numParams
+ 1 : 0;
1779 ExtraArgs::deallocate(extraArgs
, n
);
1780 ar
->m_varEnv
= nullptr;
1783 while (paramId
>= 0) {
1788 tvWriteNull(retval
);
1796 reenterVM(retval
, ar
, savedSP
);
1798 assert(m_nestedVMs
.size() == 0);
1799 enterVM(retval
, ar
);
1803 void VMExecutionContext::invokeFuncFew(TypedValue
* retval
,
1806 StringData
* invName
,
1807 int argc
, TypedValue
* argv
) {
1810 // If this is a regular function, this_ and cls must be NULL
1811 assert(f
->preClass() || !thisOrCls
);
1812 // If this is a method, either this_ or cls must be non-NULL
1813 assert(!f
->preClass() || thisOrCls
);
1814 // If this is a static method, this_ must be NULL
1815 assert(!(f
->attrs() & AttrStatic
&& !f
->isClosureBody()) ||
1816 !ActRec::decodeThis(thisOrCls
));
1817 // invName should only be non-NULL if we are calling __call or
1819 assert(!invName
|| f
->name()->isame(s___call
.get()) ||
1820 f
->name()->isame(s___callStatic
.get()));
1824 if (ObjectData
* thiz
= ActRec::decodeThis(thisOrCls
)) {
1825 thiz
->incRefCount();
1827 Cell
* savedSP
= m_stack
.top();
1828 if (argc
> kStackCheckReenterPadding
- kNumActRecCells
) {
1829 checkStack(m_stack
, f
);
1831 ActRec
* ar
= m_stack
.allocA();
1835 ar
->m_this
= (ObjectData
*)thisOrCls
;
1836 ar
->initNumArgs(argc
);
1837 if (UNLIKELY(invName
!= nullptr)) {
1838 ar
->setInvName(invName
);
1840 ar
->m_varEnv
= nullptr;
1844 if (m_fp
== nullptr) {
1845 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
1846 f
->name()->data(), ar
);
1848 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
1849 f
->name()->data(), m_pc
, ar
,
1850 m_fp
->m_func
? m_fp
->m_func
->name()->data() : "unknownBuiltin", m_fp
);
1854 for (int i
= 0; i
< argc
; i
++) {
1855 *m_stack
.allocTV() = *argv
++;
1859 reenterVM(retval
, ar
, savedSP
);
1861 assert(m_nestedVMs
.size() == 0);
1862 enterVM(retval
, ar
);
1866 void VMExecutionContext::invokeContFunc(const Func
* f
,
1868 TypedValue
* param
/* = NULL */) {
1874 this_
->incRefCount();
1876 Cell
* savedSP
= m_stack
.top();
1878 // no need to check stack due to ReenterPadding
1879 assert(kStackCheckReenterPadding
- kNumActRecCells
>= 1);
1881 ActRec
* ar
= m_stack
.allocA();
1885 ar
->initNumArgs(param
!= nullptr ? 1 : 0);
1887 ar
->setVarEnv(nullptr);
1889 if (param
!= nullptr) {
1890 tvDup(*param
, *m_stack
.allocTV());
1894 reenterVM(&retval
, ar
, savedSP
);
1895 // Codegen for generator functions guarantees that they will return null
1896 assert(IS_NULL_TYPE(retval
.m_type
));
1899 void VMExecutionContext::invokeUnit(TypedValue
* retval
, Unit
* unit
) {
1900 Func
* func
= unit
->getMain();
1901 invokeFunc(retval
, func
, null_array
, nullptr, nullptr,
1902 m_globalVarEnv
, nullptr, InvokePseudoMain
);
1906 * Given a pointer to a VM frame, returns the previous VM frame in the call
1907 * stack. This function will also pass back by reference the previous PC (if
1908 * prevPc is non-null) and the previous SP (if prevSp is non-null).
1910 * If there is no previous VM frame, this function returns NULL and does not
1911 * set prevPc and prevSp.
1913 ActRec
* VMExecutionContext::getPrevVMState(const ActRec
* fp
,
1914 Offset
* prevPc
/* = NULL */,
1915 TypedValue
** prevSp
/* = NULL */,
1916 bool* fromVMEntry
/* = NULL */) {
1917 if (fp
== nullptr) {
1920 ActRec
* prevFp
= fp
->arGetSfp();
1923 if (UNLIKELY(fp
->m_func
->isGenerator())) {
1924 *prevSp
= (TypedValue
*)prevFp
- prevFp
->m_func
->numSlotsInFrame();
1926 *prevSp
= (TypedValue
*)&fp
[1];
1929 if (prevPc
) *prevPc
= prevFp
->m_func
->base() + fp
->m_soff
;
1930 if (fromVMEntry
) *fromVMEntry
= false;
1933 // Linear search from end of m_nestedVMs. In practice, we're probably
1934 // looking for something recently pushed.
1935 int i
= m_nestedVMs
.size() - 1;
1936 for (; i
>= 0; --i
) {
1937 if (m_nestedVMs
[i
].m_entryFP
== fp
) break;
1939 if (i
== -1) return nullptr;
1940 const VMState
& vmstate
= m_nestedVMs
[i
].m_savedState
;
1941 prevFp
= vmstate
.fp
;
1943 assert(prevFp
->m_func
->unit());
1944 if (prevSp
) *prevSp
= vmstate
.sp
;
1945 if (prevPc
) *prevPc
= prevFp
->m_func
->unit()->offsetOf(vmstate
.pc
);
1946 if (fromVMEntry
) *fromVMEntry
= true;
1950 Array
VMExecutionContext::debugBacktrace(bool skip
/* = false */,
1951 bool withSelf
/* = false */,
1952 bool withThis
/* = false */,
1954 parserFrame
/* = NULL */,
1955 bool ignoreArgs
/* = false */,
1956 int limit
/* = 0 */) {
1957 Array bt
= Array::Create();
1959 // If there is a parser frame, put it at the beginning of
1964 .set(s_file
, parserFrame
->filename
, true)
1965 .set(s_line
, parserFrame
->lineNumber
, true)
1972 // If there are no VM frames, we're done
1977 ActRec
* fp
= nullptr;
1980 // Get the fp and pc of the top frame (possibly skipping one frame)
1983 fp
= getPrevVMState(getFP(), &pc
);
1985 // We skipped over the only VM frame, we're done
1990 Unit
*unit
= getFP()->m_func
->unit();
1992 pc
= unit
->offsetOf(m_pc
);
1995 // Handle the top frame
1997 // Builtins don't have a file and line number
1998 if (!fp
->m_func
->isBuiltin()) {
1999 Unit
*unit
= fp
->m_func
->unit();
2001 const char* filename
= unit
->filepath()->data();
2002 if (fp
->m_func
->originalFilename()) {
2003 filename
= fp
->m_func
->originalFilename()->data();
2008 ArrayInit
frame(parserFrame
? 4 : 2);
2009 frame
.set(s_file
, filename
, true);
2010 frame
.set(s_line
, unit
->getLineNumber(off
), true);
2012 frame
.set(s_function
, s_include
, true);
2013 frame
.set(s_args
, Array::Create(parserFrame
->filename
), true);
2015 bt
.append(frame
.toVariant());
2021 // Handle the subsequent VM frames
2023 for (ActRec
* prevFp
= getPrevVMState(fp
, &prevPc
);
2024 fp
!= nullptr && (limit
== 0 || depth
< limit
);
2025 fp
= prevFp
, pc
= prevPc
, prevFp
= getPrevVMState(fp
, &prevPc
)) {
2026 // do not capture frame for HPHP only functions
2027 if (fp
->m_func
->isNoInjection()) {
2033 auto const curUnit
= fp
->m_func
->unit();
2034 auto const curOp
= toOp(*curUnit
->at(pc
));
2035 auto const isReturning
= curOp
== OpRetC
|| curOp
== OpRetV
;
2037 // Builtins and generators don't have a file and line number
2038 if (prevFp
&& !prevFp
->m_func
->isBuiltin() && !fp
->m_func
->isGenerator()) {
2039 auto const prevUnit
= prevFp
->m_func
->unit();
2040 auto prevFile
= prevUnit
->filepath();
2041 if (prevFp
->m_func
->originalFilename()) {
2042 prevFile
= prevFp
->m_func
->originalFilename();
2045 frame
.set(s_file
, const_cast<StringData
*>(prevFile
), true);
2047 // In the normal method case, the "saved pc" for line number printing is
2048 // pointing at the cell conversion (Unbox/Pop) instruction, not the call
2049 // itself. For multi-line calls, this instruction is associated with the
2050 // subsequent line which results in an off-by-n. We're subtracting one
2051 // in order to look up the line associated with the FCall/FCallArray
2052 // instruction. Exception handling and the other opcodes (ex. BoxR)
2053 // already do the right thing. The emitter associates object access with
2054 // the subsequent expression and this would be difficult to modify.
2055 auto const opAtPrevPc
=
2056 toOp(*reinterpret_cast<const Opcode
*>(prevUnit
->at(prevPc
)));
2057 Offset pcAdjust
= 0;
2058 if (opAtPrevPc
== OpPopR
|| opAtPrevPc
== OpUnboxR
) {
2062 prevFp
->m_func
->unit()->getLineNumber(prevPc
- pcAdjust
),
2066 // check for include
2067 String funcname
= const_cast<StringData
*>(fp
->m_func
->name());
2068 if (fp
->m_func
->isGenerator()) {
2069 // retrieve the original function name from the inner continuation
2070 TypedValue
* tv
= frame_local(fp
, 0);
2071 assert(tv
->m_type
== HPHP::KindOfObject
);
2072 funcname
= static_cast<c_Continuation
*>(
2073 tv
->m_data
.pobj
)->t_getorigfuncname();
2076 if (fp
->m_func
->isClosureBody()) {
2077 static StringData
* s_closure_label
=
2078 StringData::GetStaticString("{closure}");
2079 funcname
= s_closure_label
;
2082 // check for pseudomain
2083 if (funcname
->empty()) {
2084 if (!prevFp
) continue;
2085 funcname
= s_include
;
2088 frame
.set(s_function
, funcname
, true);
2090 if (!funcname
.same(s_include
)) {
2091 // Closures have an m_this but they aren't in object context
2092 Class
* ctx
= arGetContextClass(fp
);
2093 if (ctx
!= nullptr && !fp
->m_func
->isClosureBody()) {
2094 frame
.set(s_class
, ctx
->name()->data(), true);
2095 if (fp
->hasThis() && !isReturning
) {
2097 frame
.set(s_object
, Object(fp
->getThis()), true);
2099 frame
.set(s_type
, "->", true);
2101 frame
.set(s_type
, "::", true);
2106 Array args
= Array::Create();
2109 } else if (funcname
.same(s_include
)) {
2111 args
.append(const_cast<StringData
*>(curUnit
->filepath()));
2112 frame
.set(s_args
, args
, true);
2114 } else if (!RuntimeOption::EnableArgsInBacktraces
|| isReturning
) {
2115 // Provide an empty 'args' array to be consistent with hphpc
2116 frame
.set(s_args
, args
, true);
2118 int nparams
= fp
->m_func
->numParams();
2119 int nargs
= fp
->numArgs();
2120 /* builtin extra args are not stored in varenv */
2121 if (nargs
<= nparams
) {
2122 for (int i
= 0; i
< nargs
; i
++) {
2123 TypedValue
*arg
= frame_local(fp
, i
);
2124 args
.append(tvAsVariant(arg
));
2128 for (i
= 0; i
< nparams
; i
++) {
2129 TypedValue
*arg
= frame_local(fp
, i
);
2130 args
.append(tvAsVariant(arg
));
2132 for (; i
< nargs
; i
++) {
2133 TypedValue
*arg
= fp
->getExtraArg(i
- nparams
);
2134 args
.append(tvAsVariant(arg
));
2137 frame
.set(s_args
, args
, true);
2140 bt
.append(frame
.toVariant());
2146 MethodInfoVM::~MethodInfoVM() {
2147 for (std::vector
<const ClassInfo::ParameterInfo
*>::iterator it
=
2148 parameters
.begin(); it
!= parameters
.end(); ++it
) {
2149 if ((*it
)->value
!= nullptr) {
2150 free((void*)(*it
)->value
);
2155 ClassInfoVM::~ClassInfoVM() {
2156 destroyMembers(m_methodsVec
);
2157 destroyMapValues(m_properties
);
2158 destroyMapValues(m_constants
);
2161 Array
VMExecutionContext::getUserFunctionsInfo() {
2162 // Return an array of all user-defined function names. This method is used to
2163 // support get_defined_functions().
2164 return Unit::getUserFunctions();
2167 Array
VMExecutionContext::getConstantsInfo() {
2168 // Return an array of all defined constant:value pairs. This method is used
2169 // to support get_defined_constants().
2170 return Array::Create();
2173 const ClassInfo::MethodInfo
* VMExecutionContext::findFunctionInfo(
2175 StringIMap
<AtomicSmartPtr
<MethodInfoVM
> >::iterator it
=
2176 m_functionInfos
.find(name
);
2177 if (it
== m_functionInfos
.end()) {
2178 Func
* func
= Unit::loadFunc(name
.get());
2179 if (func
== nullptr || func
->builtinFuncPtr()) {
2182 AtomicSmartPtr
<MethodInfoVM
> &m
= m_functionInfos
[name
];
2183 m
= new MethodInfoVM();
2184 func
->getFuncInfo(m
.get());
2187 return it
->second
.get();
2191 const ClassInfo
* VMExecutionContext::findClassInfo(CStrRef name
) {
2192 if (name
->empty()) return nullptr;
2193 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2194 m_classInfos
.find(name
);
2195 if (it
== m_classInfos
.end()) {
2196 Class
* cls
= Unit::lookupClass(name
.get());
2197 if (cls
== nullptr) return nullptr;
2198 if (cls
->clsInfo()) return cls
->clsInfo();
2199 if (cls
->attrs() & (AttrInterface
| AttrTrait
)) {
2200 // If the specified name matches with something that is not formally
2201 // a class, return NULL
2204 AtomicSmartPtr
<ClassInfoVM
> &c
= m_classInfos
[name
];
2205 c
= new ClassInfoVM();
2206 cls
->getClassInfo(c
.get());
2209 return it
->second
.get();
2213 const ClassInfo
* VMExecutionContext::findInterfaceInfo(CStrRef name
) {
2214 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2215 m_interfaceInfos
.find(name
);
2216 if (it
== m_interfaceInfos
.end()) {
2217 Class
* cls
= Unit::lookupClass(name
.get());
2218 if (cls
== nullptr) return nullptr;
2219 if (cls
->clsInfo()) return cls
->clsInfo();
2220 if (!(cls
->attrs() & AttrInterface
)) {
2221 // If the specified name matches with something that is not formally
2222 // an interface, return NULL
2225 AtomicSmartPtr
<ClassInfoVM
> &c
= m_interfaceInfos
[name
];
2226 c
= new ClassInfoVM();
2227 cls
->getClassInfo(c
.get());
2230 return it
->second
.get();
2234 const ClassInfo
* VMExecutionContext::findTraitInfo(CStrRef name
) {
2235 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2236 m_traitInfos
.find(name
);
2237 if (it
!= m_traitInfos
.end()) {
2238 return it
->second
.get();
2240 Class
* cls
= Unit::lookupClass(name
.get());
2241 if (cls
== nullptr) return nullptr;
2242 if (cls
->clsInfo()) return cls
->clsInfo();
2243 if (!(cls
->attrs() & AttrTrait
)) {
2246 AtomicSmartPtr
<ClassInfoVM
> &classInfo
= m_traitInfos
[name
];
2247 classInfo
= new ClassInfoVM();
2248 cls
->getClassInfo(classInfo
.get());
2249 return classInfo
.get();
2252 const ClassInfo::ConstantInfo
* VMExecutionContext::findConstantInfo(
2254 TypedValue
* tv
= Unit::lookupCns(name
.get());
2255 if (tv
== nullptr) {
2258 ConstInfoMap::const_iterator it
= m_constInfo
.find(name
.get());
2259 if (it
!= m_constInfo
.end()) {
2262 StringData
* key
= StringData::GetStaticString(name
.get());
2263 ClassInfo::ConstantInfo
* ci
= new ClassInfo::ConstantInfo();
2264 ci
->name
= *(const String
*)&key
;
2267 ci
->setValue(tvAsCVarRef(tv
));
2268 m_constInfo
[key
] = ci
;
2272 HPHP::Eval::PhpFile
* VMExecutionContext::lookupPhpFile(StringData
* path
,
2273 const char* currentDir
,
2274 bool* initial_opt
) {
2276 bool &initial
= initial_opt
? *initial_opt
: init
;
2280 String spath
= Eval::resolveVmInclude(path
, currentDir
, &s
);
2281 if (spath
.isNull()) return nullptr;
2283 // Check if this file has already been included.
2284 EvaledFilesMap::const_iterator it
= m_evaledFiles
.find(spath
.get());
2285 HPHP::Eval::PhpFile
* efile
= nullptr;
2286 if (it
!= m_evaledFiles
.end()) {
2287 // We found it! Return the unit.
2292 // We didn't find it, so try the realpath.
2293 bool alreadyResolved
=
2294 RuntimeOption::RepoAuthoritative
||
2295 (!RuntimeOption::CheckSymLink
&& (spath
[0] == '/'));
2296 bool hasRealpath
= false;
2298 if (!alreadyResolved
) {
2299 std::string rp
= StatCache::realpath(spath
.data());
2300 if (rp
.size() != 0) {
2301 rpath
= NEW(StringData
)(rp
.data(), rp
.size(), CopyString
);
2302 if (!rpath
.same(spath
)) {
2304 it
= m_evaledFiles
.find(rpath
.get());
2305 if (it
!= m_evaledFiles
.end()) {
2306 // We found it! Update the mapping for spath and
2309 m_evaledFiles
[spath
.get()] = efile
;
2310 spath
.get()->incRefCount();
2317 // This file hasn't been included yet, so we need to parse the file
2318 efile
= HPHP::Eval::FileRepository::checkoutFile(
2319 hasRealpath
? rpath
.get() : spath
.get(), s
);
2320 if (efile
&& initial_opt
) {
2321 // if initial_opt is not set, this shouldn't be recorded as a
2322 // per request fetch of the file.
2323 if (Transl::TargetCache::testAndSetBit(efile
->getId())) {
2326 // if parsing was successful, update the mappings for spath and
2327 // rpath (if it exists).
2328 m_evaledFiles
[spath
.get()] = efile
;
2329 spath
.get()->incRefCount();
2330 // Don't incRef efile; checkoutFile() already counted it.
2332 m_evaledFiles
[rpath
.get()] = efile
;
2333 rpath
.get()->incRefCount();
2335 DEBUGGER_ATTACHED_ONLY(phpDebuggerFileLoadHook(efile
));
2340 Unit
* VMExecutionContext::evalInclude(StringData
* path
,
2341 const StringData
* curUnitFilePath
,
2343 namespace fs
= boost::filesystem
;
2344 HPHP::Eval::PhpFile
* efile
= nullptr;
2345 if (curUnitFilePath
) {
2346 fs::path
currentUnit(curUnitFilePath
->data());
2347 fs::path
currentDir(currentUnit
.branch_path());
2348 efile
= lookupPhpFile(path
, currentDir
.string().c_str(), initial
);
2350 efile
= lookupPhpFile(path
, "", initial
);
2353 return efile
->unit();
2358 HPHP::Unit
* VMExecutionContext::evalIncludeRoot(
2359 StringData
* path
, InclOpFlags flags
, bool* initial
) {
2360 HPHP::Eval::PhpFile
* efile
= lookupIncludeRoot(path
, flags
, initial
);
2361 return efile
? efile
->unit() : 0;
2364 HPHP::Eval::PhpFile
* VMExecutionContext::lookupIncludeRoot(StringData
* path
,
2369 if ((flags
& InclOpRelative
)) {
2370 namespace fs
= boost::filesystem
;
2371 if (!unit
) unit
= getFP()->m_func
->unit();
2372 fs::path
currentUnit(unit
->filepath()->data());
2373 fs::path
currentDir(currentUnit
.branch_path());
2374 absPath
= currentDir
.string() + '/';
2375 TRACE(2, "lookupIncludeRoot(%s): relative -> %s\n",
2379 assert(flags
& InclOpDocRoot
);
2380 absPath
= SourceRootInfo::GetCurrentPhpRoot();
2381 TRACE(2, "lookupIncludeRoot(%s): docRoot -> %s\n",
2386 absPath
+= StrNR(path
);
2388 EvaledFilesMap::const_iterator it
= m_evaledFiles
.find(absPath
.get());
2389 if (it
!= m_evaledFiles
.end()) {
2390 if (initial
) *initial
= false;
2394 return lookupPhpFile(absPath
.get(), "", initial
);
Instantiate hoistable classes and functions.
If there is any more work left to do, set up a
new frame ready to execute the pseudomain.
Return true iff the pseudomain needs to be executed.
2404 bool VMExecutionContext::evalUnit(Unit
* unit
, PC
& pc
, int funcType
) {
2407 if (unit
->isMergeOnly()) {
2408 Stats::inc(Stats::PseudoMain_Skipped
);
2409 *m_stack
.allocTV() = *unit
->getMainReturn();
2412 Stats::inc(Stats::PseudoMain_Executed
);
2415 ActRec
* ar
= m_stack
.allocA();
2416 assert((uintptr_t)&ar
->m_func
< (uintptr_t)&ar
->m_r
);
2417 Class
* cls
= curClass();
2418 if (m_fp
->hasThis()) {
2419 ObjectData
*this_
= m_fp
->getThis();
2420 this_
->incRefCount();
2422 } else if (m_fp
->hasClass()) {
2423 ar
->setClass(m_fp
->getClass());
2425 ar
->setThis(nullptr);
2427 Func
* func
= unit
->getMain(cls
);
2428 assert(!func
->info());
2429 assert(!func
->isGenerator());
2433 assert(!m_fp
->hasInvName());
2435 ar
->m_soff
= uintptr_t(m_fp
->m_func
->unit()->offsetOf(pc
) -
2436 m_fp
->m_func
->base());
2438 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
2439 assert(isReturnHelper(ar
->m_savedRip
));
2440 pushLocalsAndIterators(func
);
2441 if (!m_fp
->hasVarEnv()) {
2442 m_fp
->setVarEnv(VarEnv::createLocalOnStack(m_fp
));
2444 ar
->m_varEnv
= m_fp
->m_varEnv
;
2445 ar
->m_varEnv
->attach(ar
);
2448 pc
= func
->getEntry();
2450 bool ret
= EventHook::FunctionEnter(m_fp
, funcType
);
2456 s_php_namespace("<?php namespace "),
2457 s_curly_return(" { return "),
2458 s_semicolon_curly("; }"),
2459 s_php_return("<?php return "),
2461 CVarRef
VMExecutionContext::getEvaledArg(const StringData
* val
,
2462 CStrRef namespacedName
) {
2463 CStrRef key
= *(String
*)&val
;
2465 if (m_evaledArgs
.get()) {
2466 CVarRef arg
= m_evaledArgs
.get()->get(key
);
2467 if (&arg
!= &null_variant
) return arg
;
2471 int pos
= namespacedName
.rfind('\\');
2473 auto ns
= namespacedName
.substr(0, pos
);
2474 code
= s_php_namespace
+ ns
+ s_curly_return
+ key
+ s_semicolon_curly
;
2476 code
= s_php_return
+ key
+ s_semicolon
;
2478 Unit
* unit
= compileEvalString(code
.get());
2479 assert(unit
!= nullptr);
2481 // Default arg values are not currently allowed to depend on class context.
2482 g_vmContext
->invokeFunc((TypedValue
*)&v
, unit
->getMain(),
2483 null_array
, nullptr, nullptr, nullptr, nullptr,
2485 Variant
&lv
= m_evaledArgs
.lvalAt(key
, AccessFlags::Key
);
2491 * Helper for function entry, including pseudo-main entry.
2494 VMExecutionContext::pushLocalsAndIterators(const Func
* func
,
2495 int nparams
/*= 0*/) {
2497 for (int i
= nparams
; i
< func
->numLocals(); i
++) {
2498 m_stack
.pushUninit();
2501 for (int i
= 0; i
< func
->numIterators(); i
++) {
2506 void VMExecutionContext::enqueueSharedVar(SharedVariant
* svar
) {
2507 m_freedSvars
.push_back(svar
);
2510 class FreedSVars
: public Treadmill::WorkItem
{
2513 explicit FreedSVars(SVarVector
&& svars
) : m_svars(std::move(svars
)) {}
2514 virtual void operator()() {
2515 for (auto it
= m_svars
.begin(); it
!= m_svars
.end(); it
++) {
2521 void VMExecutionContext::treadmillSharedVars() {
2522 Treadmill::WorkItem::enqueue(new FreedSVars(std::move(m_freedSvars
)));
2525 void VMExecutionContext::destructObjects() {
2526 if (UNLIKELY(RuntimeOption::EnableObjDestructCall
)) {
2527 while (!m_liveBCObjs
.empty()) {
2528 ObjectData
* obj
= *m_liveBCObjs
.begin();
2529 obj
->destruct(); // Let the instance remove the node.
2531 m_liveBCObjs
.clear();
2535 // Evaled units have a footprint in the TC and translation metadata. The
2536 // applications we care about tend to have few, short, stereotyped evals,
2537 // where the same code keeps getting eval'ed over and over again; so we
2538 // keep around units for each eval'ed string, so that the TC space isn't
2539 // wasted on each eval.
2540 typedef RankedCHM
<StringData
*, HPHP::Unit
*,
2541 StringDataHashCompare
,
2542 RankEvaledUnits
> EvaledUnitsMap
;
2543 static EvaledUnitsMap s_evaledUnits
;
2544 Unit
* VMExecutionContext::compileEvalString(StringData
* code
) {
2545 EvaledUnitsMap::accessor acc
;
2546 // Promote this to a static string; otherwise it may get swept
2548 code
= StringData::GetStaticString(code
);
2549 if (s_evaledUnits
.insert(acc
, code
)) {
2550 acc
->second
= compile_string(code
->data(), code
->size());
2555 CStrRef
VMExecutionContext::createFunction(CStrRef args
, CStrRef code
) {
2557 // It doesn't matter if there's a user function named __lambda_func; we only
2558 // use this name during parsing, and then change it to an impossible name
2559 // with a NUL byte before we merge it into the request's func map. This also
2560 // has the bonus feature that the value of __FUNCTION__ inside the created
2561 // function will match Zend. (Note: Zend will actually fatal if there's a
2562 // user function named __lambda_func when you call create_function. Huzzah!)
2563 static StringData
* oldName
= StringData::GetStaticString("__lambda_func");
2564 std::ostringstream codeStr
;
2565 codeStr
<< "<?php function " << oldName
->data()
2566 << "(" << args
.data() << ") {"
2567 << code
.data() << "}\n";
2568 StringData
* evalCode
= StringData::GetStaticString(codeStr
.str());
2569 Unit
* unit
= compile_string(evalCode
->data(), evalCode
->size());
2570 // Move the function to a different name.
2571 std::ostringstream newNameStr
;
2572 newNameStr
<< '\0' << "lambda_" << ++m_lambdaCounter
;
2573 StringData
* newName
= StringData::GetStaticString(newNameStr
.str());
2574 unit
->renameFunc(oldName
, newName
);
2575 m_createdFuncs
.push_back(unit
);
2578 // Technically we shouldn't have to eval the unit right now (it'll execute
2579 // the pseudo-main, which should be empty) and could get away with just
2580 // mergeFuncs. However, Zend does it this way, as proven by the fact that you
2581 // can inject code into the evaled unit's pseudo-main:
2583 // create_function('', '} echo "hi"; if (0) {');
2585 // We have to eval now to emulate this behavior.
2587 invokeFunc(&retval
, unit
->getMain(), null_array
,
2588 nullptr, nullptr, nullptr, nullptr,
2591 // __lambda_func will be the only hoistable function.
2592 // Any functions or closures defined in it will not be hoistable.
2593 Func
* lambda
= unit
->firstHoistable();
2594 return lambda
->nameRef();
2597 void VMExecutionContext::evalPHPDebugger(TypedValue
* retval
, StringData
*code
,
2600 // The code has "<?php" prepended already
2601 Unit
* unit
= compileEvalString(code
);
2602 if (unit
== nullptr) {
2603 raise_error("Syntax error");
2604 tvWriteNull(retval
);
2608 VarEnv
*varEnv
= nullptr;
2609 ActRec
*fp
= getFP();
2610 ActRec
*cfpSave
= nullptr;
2612 for (; frame
> 0; --frame
) {
2613 ActRec
* prevFp
= getPrevVMState(fp
);
2615 // To be safe in case we failed to get prevFp. This would mean we've
2616 // been asked to eval in a frame which is beyond the top of the stack.
2617 // This suggests the debugger client has made an error.
2622 if (!fp
->hasVarEnv()) {
2623 fp
->setVarEnv(VarEnv::createLocalOnHeap(fp
));
2625 varEnv
= fp
->m_varEnv
;
2626 cfpSave
= varEnv
->getCfp();
2628 ObjectData
*this_
= nullptr;
2629 // NB: the ActRec and function within the AR may have different classes. The
2630 // class in the ActRec is the type used when invoking the function (i.e.,
2631 // Derived in Derived::Foo()) while the class obtained from the function is
2632 // the type that declared the function Foo, which may be Base. We need both
2633 // the class to match any object that this function may have been invoked on,
2634 // and we need the class from the function execution is stopped in.
2635 Class
*frameClass
= nullptr;
2636 Class
*functionClass
= nullptr;
2638 if (fp
->hasThis()) {
2639 this_
= fp
->getThis();
2640 } else if (fp
->hasClass()) {
2641 frameClass
= fp
->getClass();
2643 functionClass
= fp
->m_func
->cls();
2644 phpDebuggerEvalHook(fp
->m_func
);
2647 const static StaticString
s_cppException("Hit an exception");
2648 const static StaticString
s_phpException("Hit a php exception");
2649 const static StaticString
s_exit("Hit exit");
2650 const static StaticString
s_fatal("Hit fatal");
2652 // Invoke the given PHP, possibly specialized to match the type of the
2653 // current function on the stack, optionally passing a this pointer or
2654 // class used to execute the current function.
2655 invokeFunc(retval
, unit
->getMain(functionClass
), null_array
,
2656 this_
, frameClass
, varEnv
, nullptr, InvokePseudoMain
);
2657 } catch (FatalErrorException
&e
) {
2658 g_vmContext
->write(s_fatal
);
2659 g_vmContext
->write(" : ");
2660 g_vmContext
->write(e
.getMessage().c_str());
2661 g_vmContext
->write("\n");
2662 g_vmContext
->write(ExtendedLogger::StringOfStackTrace(e
.getBackTrace()));
2663 } catch (ExitException
&e
) {
2664 g_vmContext
->write(s_exit
.data());
2665 g_vmContext
->write(" : ");
2666 std::ostringstream os
;
2667 os
<< ExitException::ExitCode
;
2668 g_vmContext
->write(os
.str());
2669 } catch (Eval::DebuggerException
&e
) {
2671 varEnv
->setCfp(cfpSave
);
2674 } catch (Exception
&e
) {
2675 g_vmContext
->write(s_cppException
.data());
2676 g_vmContext
->write(" : ");
2677 g_vmContext
->write(e
.getMessage().c_str());
2678 ExtendedException
* ee
= dynamic_cast<ExtendedException
*>(&e
);
2680 g_vmContext
->write("\n");
2682 ExtendedLogger::StringOfStackTrace(ee
->getBackTrace()));
2684 } catch (Object
&e
) {
2685 g_vmContext
->write(s_phpException
.data());
2686 g_vmContext
->write(" : ");
2687 g_vmContext
->write(e
->t___tostring().data());
2689 g_vmContext
->write(s_cppException
.data());
2693 // The debugger eval frame may have attached to the VarEnv from a
2694 // frame that was not the top frame, so we need to manually set
2695 // cfp back to what it was before
2696 varEnv
->setCfp(cfpSave
);
2700 void VMExecutionContext::enterDebuggerDummyEnv() {
2701 static Unit
* s_debuggerDummy
= compile_string("<?php?>", 7);
2702 // Ensure that the VM stack is completely empty (m_fp should be null)
2703 // and that we're not in a nested VM (reentrancy)
2704 assert(getFP() == nullptr);
2705 assert(m_nestedVMs
.size() == 0);
2706 assert(m_nesting
== 0);
2707 assert(m_stack
.count() == 0);
2708 ActRec
* ar
= m_stack
.allocA();
2709 ar
->m_func
= s_debuggerDummy
->getMain();
2710 ar
->setThis(nullptr);
2713 ar
->m_savedRip
= reinterpret_cast<uintptr_t>(tx()->getCallToExit());
2714 assert(isReturnHelper(ar
->m_savedRip
));
2716 m_pc
= s_debuggerDummy
->entry();
2718 m_fp
->setVarEnv(m_globalVarEnv
);
2719 m_globalVarEnv
->attach(m_fp
);
2722 void VMExecutionContext::exitDebuggerDummyEnv() {
2723 assert(m_globalVarEnv
);
2724 // Ensure that m_fp is valid
2725 assert(getFP() != nullptr);
2726 // Ensure that m_fp points to the only frame on the call stack.
2727 // In other words, make sure there are no VM frames directly below
2728 // this one and that we are not in a nested VM (reentrancy)
2729 assert(m_fp
->arGetSfp() == m_fp
);
2730 assert(m_nestedVMs
.size() == 0);
2731 assert(m_nesting
== 0);
2732 // Teardown the frame we erected by enterDebuggerDummyEnv()
2733 const Func
* func
= m_fp
->m_func
;
2735 frame_free_locals_inl_no_hook
<true>(m_fp
, func
->numLocals());
2737 m_stack
.ndiscard(func
->numSlotsInFrame());
2738 m_stack
.discardAR();
2739 // After tearing down this frame, the VM stack should be completely empty
2740 assert(m_stack
.count() == 0);
2745 // Identifies the set of return helpers that we may set m_savedRip to in an
2747 bool VMExecutionContext::isReturnHelper(uintptr_t address
) {
2748 auto tcAddr
= reinterpret_cast<Transl::TCA
>(address
);
2749 return ((tcAddr
== tx()->getRetFromInterpretedFrame()) ||
2750 (tcAddr
== tx()->getRetFromInterpretedGeneratorFrame()) ||
2751 (tcAddr
== tx()->getCallToExit()));
2754 // Walk the stack and find any return address to jitted code and bash it to
2755 // the appropriate RetFromInterpreted*Frame helper. This ensures that we don't
2756 // return into jitted code and gives the system the proper chance to interpret
2757 // blacklisted tracelets.
2758 void VMExecutionContext::preventReturnsToTC() {
2759 assert(isDebuggerAttached());
2760 if (RuntimeOption::EvalJit
) {
2761 ActRec
*ar
= getFP();
2763 if (!isReturnHelper(ar
->m_savedRip
) &&
2764 (tx()->isValidCodeAddress((Transl::TCA
)ar
->m_savedRip
))) {
2765 TRACE_RB(2, "Replace RIP in fp %p, savedRip 0x%" PRIx64
", "
2766 "func %s\n", ar
, ar
->m_savedRip
,
2767 ar
->m_func
->fullName()->data());
2768 if (ar
->m_func
->isGenerator()) {
2770 reinterpret_cast<uintptr_t>(
2771 tx()->getRetFromInterpretedGeneratorFrame());
2774 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
2776 assert(isReturnHelper(ar
->m_savedRip
));
2778 ar
= getPrevVMState(ar
);
2783 static inline StringData
* lookup_name(TypedValue
* key
) {
2784 return prepareKey(key
);
2787 static inline void lookup_var(ActRec
* fp
,
2791 name
= lookup_name(key
);
2792 const Func
* func
= fp
->m_func
;
2793 Id id
= func
->lookupVarId(name
);
2794 if (id
!= kInvalidId
) {
2795 val
= frame_local(fp
, id
);
2797 assert(!fp
->hasInvName());
2798 if (fp
->hasVarEnv()) {
2799 val
= fp
->m_varEnv
->lookup(name
);
2806 static inline void lookupd_var(ActRec
* fp
,
2810 name
= lookup_name(key
);
2811 const Func
* func
= fp
->m_func
;
2812 Id id
= func
->lookupVarId(name
);
2813 if (id
!= kInvalidId
) {
2814 val
= frame_local(fp
, id
);
2816 assert(!fp
->hasInvName());
2817 if (!fp
->hasVarEnv()) {
2818 fp
->setVarEnv(VarEnv::createLocalOnStack(fp
));
2820 val
= fp
->m_varEnv
->lookup(name
);
2821 if (val
== nullptr) {
2824 fp
->m_varEnv
->set(name
, &tv
);
2825 val
= fp
->m_varEnv
->lookup(name
);
2830 static inline void lookup_gbl(ActRec
* fp
,
2834 name
= lookup_name(key
);
2835 assert(g_vmContext
->m_globalVarEnv
);
2836 val
= g_vmContext
->m_globalVarEnv
->lookup(name
);
2839 static inline void lookupd_gbl(ActRec
* fp
,
2843 name
= lookup_name(key
);
2844 assert(g_vmContext
->m_globalVarEnv
);
2845 VarEnv
* varEnv
= g_vmContext
->m_globalVarEnv
;
2846 val
= varEnv
->lookup(name
);
2847 if (val
== nullptr) {
2850 varEnv
->set(name
, &tv
);
2851 val
= varEnv
->lookup(name
);
2855 static inline void lookup_sprop(ActRec
* fp
,
2862 assert(clsRef
->m_type
== KindOfClass
);
2863 name
= lookup_name(key
);
2864 Class
* ctx
= arGetContextClass(fp
);
2865 val
= clsRef
->m_data
.pcls
->getSProp(ctx
, name
, visible
, accessible
);
2868 static inline void lookupClsRef(TypedValue
* input
,
2870 bool decRef
= false) {
2871 const Class
* class_
= nullptr;
2872 if (IS_STRING_TYPE(input
->m_type
)) {
2873 class_
= Unit::loadClass(input
->m_data
.pstr
);
2874 if (class_
== nullptr) {
2875 output
->m_type
= KindOfNull
;
2876 raise_error(Strings::UNKNOWN_CLASS
, input
->m_data
.pstr
->data());
2878 } else if (input
->m_type
== KindOfObject
) {
2879 class_
= input
->m_data
.pobj
->getVMClass();
2881 output
->m_type
= KindOfNull
;
2882 raise_error("Cls: Expected string or object");
2885 tvRefcountedDecRef(input
);
2887 output
->m_data
.pcls
= const_cast<Class
*>(class_
);
2888 output
->m_type
= KindOfClass
;
2891 static UNUSED
int innerCount(const TypedValue
* tv
) {
2892 if (IS_REFCOUNTED_TYPE(tv
->m_type
)) {
2893 // We're using pref here arbitrarily; any refcounted union member works.
2894 return tv
->m_data
.pref
->_count
;
2899 static inline void ratchetRefs(TypedValue
*& result
, TypedValue
& tvRef
,
2900 TypedValue
& tvRef2
) {
2901 TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
2902 result
, result
->m_type
, innerCount(result
),
2903 &tvRef
, tvRef
.m_type
, innerCount(&tvRef
),
2904 &tvRef2
, tvRef2
.m_type
, innerCount(&tvRef2
));
2905 // Due to complications associated with ArrayAccess, it is possible to acquire
2906 // a reference as a side effect of vector operation processing. Such a
2907 // reference must be retained until after the next iteration is complete.
2908 // Therefore, move the reference from tvRef to tvRef2, so that the reference
2909 // will be released one iteration later. But only do this if tvRef was used in
2910 // this iteration, otherwise we may wipe out the last reference to something
2911 // that we need to stay alive until the next iteration.
2912 if (tvRef
.m_type
!= KindOfUninit
) {
2913 if (IS_REFCOUNTED_TYPE(tvRef2
.m_type
)) {
2915 TRACE(5, "Ratchet: decref tvref2\n");
2916 tvWriteUninit(&tvRef2
);
2919 memcpy(&tvRef2
, &tvRef
, sizeof(TypedValue
));
2920 tvWriteUninit(&tvRef
);
2921 // Update result to point to relocated reference. This can be done
2922 // unconditionally here because we maintain the invariant throughout that
2923 // either tvRef is KindOfUninit, or tvRef contains a valid object that
2924 // result points to.
2925 assert(result
== &tvRef
);
2930 #define DECLARE_MEMBERHELPER_ARGS \
2931 unsigned ndiscard; \
2933 TypedValue tvScratch; \
2934 TypedValue tvLiteral; \
2937 MemberCode mcode = MEL; \
2938 TypedValue* curMember = 0;
2939 #define DECLARE_SETHELPER_ARGS DECLARE_MEMBERHELPER_ARGS
2940 #define DECLARE_GETHELPER_ARGS \
2941 DECLARE_MEMBERHELPER_ARGS \
2944 #define MEMBERHELPERPRE_ARGS \
2945 pc, ndiscard, base, tvScratch, tvLiteral, \
2946 *tvRef.asTypedValue(), *tvRef2.asTypedValue(), mcode, curMember
2948 #define MEMBERHELPERPRE_OUT \
2949 pc, ndiscard, base, tvScratch, tvLiteral, \
2950 tvRef, tvRef2, mcode, curMember
// The following arguments are outputs:
// pc: bytecode instruction after the vector instruction
// ndiscard: number of stack elements to discard
// base: ultimate result of the vector-get
// tvScratch: temporary result storage
// tvRef: temporary result storage
// tvRef2: temporary result storage
// mcode: output MemberCode for the last member if LeaveLast
// curMember: output last member value if LeaveLast; but undefined
// if the last mcode == MW
//
// If saveResult is true, then upon completion of getHelperPre(),
// tvScratch contains a reference to the result (a duplicate of what
// base refers to). getHelperPost<true>(...) then saves the result
// to its final location.
2967 template <bool warn
,
2969 VMExecutionContext::VectorLeaveCode mleave
>
2970 inline void OPTBLD_INLINE
VMExecutionContext::getHelperPre(
2974 TypedValue
& tvScratch
,
2975 TypedValue
& tvLiteral
,
2979 TypedValue
*& curMember
) {
2980 memberHelperPre
<false, warn
, false, false,
2981 false, 0, mleave
, saveResult
>(MEMBERHELPERPRE_OUT
);
2984 #define GETHELPERPOST_ARGS ndiscard, tvRet, tvScratch, tvRef, tvRef2
2985 template <bool saveResult
>
2986 inline void OPTBLD_INLINE
VMExecutionContext::getHelperPost(
2987 unsigned ndiscard
, TypedValue
*& tvRet
, TypedValue
& tvScratch
,
2988 Variant
& tvRef
, Variant
& tvRef2
) {
2989 // Clean up all ndiscard elements on the stack. Actually discard
2990 // only ndiscard - 1, and overwrite the last cell with the result,
2991 // or if ndiscard is zero we actually need to allocate a cell.
2992 for (unsigned depth
= 0; depth
< ndiscard
; ++depth
) {
2993 TypedValue
* tv
= m_stack
.indTV(depth
);
2994 tvRefcountedDecRef(tv
);
2998 tvRet
= m_stack
.allocTV();
3000 m_stack
.ndiscard(ndiscard
- 1);
3001 tvRet
= m_stack
.topTV();
3005 // If tvRef wasn't just allocated, we've already decref'd it in
3007 memcpy(tvRet
, &tvScratch
, sizeof(TypedValue
));
3011 #define GETHELPER_ARGS \
3012 pc, ndiscard, tvRet, base, tvScratch, tvLiteral, \
3013 tvRef, tvRef2, mcode, curMember
3014 inline void OPTBLD_INLINE
3015 VMExecutionContext::getHelper(PC
& pc
,
3019 TypedValue
& tvScratch
,
3020 TypedValue
& tvLiteral
,
3024 TypedValue
*& curMember
) {
3025 getHelperPre
<true, true, VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
3026 getHelperPost
<true>(GETHELPERPOST_ARGS
);
3030 VMExecutionContext::getElem(TypedValue
* base
, TypedValue
* key
,
3032 assert(base
->m_type
!= KindOfArray
);
3034 tvWriteUninit(dest
);
3035 TypedValue
* result
= Elem
<true>(*dest
, *dest
, base
, key
);
3036 if (result
!= dest
) {
3037 tvDup(*result
, *dest
);
3041 template <bool setMember
,
3046 unsigned mdepth
, // extra args on stack for set (e.g. rhs)
3047 VMExecutionContext::VectorLeaveCode mleave
,
3049 inline bool OPTBLD_INLINE
VMExecutionContext::memberHelperPre(
3050 PC
& pc
, unsigned& ndiscard
, TypedValue
*& base
,
3051 TypedValue
& tvScratch
, TypedValue
& tvLiteral
,
3052 TypedValue
& tvRef
, TypedValue
& tvRef2
,
3053 MemberCode
& mcode
, TypedValue
*& curMember
) {
3054 // The caller must move pc to the vector immediate before calling
3055 // {get, set}HelperPre.
3056 const ImmVector immVec
= ImmVector::createFromStream(pc
);
3057 const uint8_t* vec
= immVec
.vec();
3058 assert(immVec
.size() > 0);
3060 // PC needs to be advanced before we do anything, otherwise if we
3061 // raise a notice in the middle of this we could resume at the wrong
3063 pc
+= immVec
.size() + sizeof(int32_t) + sizeof(int32_t);
3066 assert(mdepth
== 0);
3071 ndiscard
= immVec
.numStackValues();
3072 int depth
= mdepth
+ ndiscard
- 1;
3073 const LocationCode lcode
= LocationCode(*vec
++);
3075 TypedValue
* loc
= nullptr;
3077 Class
* const ctx
= arGetContextClass(getFP());
3080 TypedValue
* fr
= nullptr;
3083 tvWriteUninit(&tvScratch
);
3087 loc
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3090 loc
= m_stack
.indTV(depth
--);
3095 lookupd_var(m_fp
, name
, loc
, fr
);
3097 lookup_var(m_fp
, name
, loc
, fr
);
3099 if (fr
== nullptr) {
3101 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
3103 tvWriteNull(&dummy
);
3112 loc
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3115 loc
= m_stack
.indTV(depth
--);
3120 lookupd_gbl(m_fp
, name
, loc
, fr
);
3122 lookup_gbl(m_fp
, name
, loc
, fr
);
3124 if (fr
== nullptr) {
3126 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
3128 tvWriteNull(&dummy
);
3137 cref
= m_stack
.indTV(mdepth
);
3138 pname
= m_stack
.indTV(depth
--);
3141 cref
= m_stack
.indTV(mdepth
);
3142 pname
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3146 bool visible
, accessible
;
3147 assert(cref
->m_type
== KindOfClass
);
3148 const Class
* class_
= cref
->m_data
.pcls
;
3149 StringData
* name
= lookup_name(pname
);
3150 loc
= class_
->getSProp(ctx
, name
, visible
, accessible
);
3151 if (!(visible
&& accessible
)) {
3152 raise_error("Invalid static property access: %s::%s",
3153 class_
->name()->data(),
3161 int localInd
= decodeVariableSizeImm(&vec
);
3162 loc
= frame_local_inner(m_fp
, localInd
);
3164 if (loc
->m_type
== KindOfUninit
) {
3165 raise_notice(Strings::UNDEFINED_VARIABLE
,
3166 m_fp
->m_func
->localVarName(localInd
)->data());
3173 loc
= m_stack
.indTV(depth
--);
3176 assert(m_fp
->hasThis());
3177 tvScratch
.m_type
= KindOfObject
;
3178 tvScratch
.m_data
.pobj
= m_fp
->getThis();
3182 default: not_reached();
3186 tvWriteUninit(&tvLiteral
);
3187 tvWriteUninit(&tvRef
);
3188 tvWriteUninit(&tvRef2
);
3190 // Iterate through the members.
3192 mcode
= MemberCode(*vec
++);
3193 if (memberCodeHasImm(mcode
)) {
3194 int64_t memberImm
= decodeMemberCodeImm(&vec
, mcode
);
3195 if (memberCodeImmIsString(mcode
)) {
3196 tvAsVariant(&tvLiteral
) =
3197 m_fp
->m_func
->unit()->lookupLitstrId(memberImm
);
3198 assert(!IS_REFCOUNTED_TYPE(tvLiteral
.m_type
));
3199 curMember
= &tvLiteral
;
3200 } else if (mcode
== MEI
) {
3201 tvAsVariant(&tvLiteral
) = memberImm
;
3202 curMember
= &tvLiteral
;
3204 assert(memberCodeImmIsLoc(mcode
));
3205 curMember
= frame_local_inner(m_fp
, memberImm
);
3208 curMember
= (setMember
&& mcode
== MW
) ? nullptr : m_stack
.indTV(depth
--);
3211 if (mleave
== VectorLeaveCode::LeaveLast
) {
3225 result
= ElemU(tvScratch
, tvRef
, base
, curMember
);
3226 } else if (define
) {
3227 result
= ElemD
<warn
,reffy
>(tvScratch
, tvRef
, base
, curMember
);
3229 result
= Elem
<warn
>(tvScratch
, tvRef
, base
, curMember
);
3235 result
= Prop
<warn
, define
, unset
>(tvScratch
, tvRef
, ctx
, base
,
3241 result
= NewElem(tvScratch
, tvRef
, base
);
3243 raise_error("Cannot use [] for reading");
3249 result
= nullptr; // Silence compiler warning.
3251 assert(result
!= nullptr);
3252 ratchetRefs(result
, tvRef
, tvRef2
);
3253 // Check whether an error occurred (i.e. no result was set).
3254 if (setMember
&& result
== &tvScratch
&& result
->m_type
== KindOfUninit
) {
3260 if (mleave
== VectorLeaveCode::ConsumeAll
) {
3263 if (lcode
== LSC
|| lcode
== LSL
) {
3264 assert(depth
== int(mdepth
));
3266 assert(depth
== int(mdepth
) - 1);
3273 // If requested, save a copy of the result. If base already points to
3274 // tvScratch, no reference counting is necessary, because (with the
3275 // exception of the following block), tvScratch is never populated such
3276 // that it owns a reference that must be accounted for.
3277 if (base
!= &tvScratch
) {
3278 // Acquire a reference to the result via tvDup(); base points to the
3279 // result but does not own a reference.
3280 tvDup(*base
, tvScratch
);
3287 // The following arguments are outputs: (TODO put them in struct)
3288 // pc: bytecode instruction after the vector instruction
3289 // ndiscard: number of stack elements to discard
3290 // base: ultimate result of the vector-get
3291 // tvScratch: temporary result storage
3292 // tvRef: temporary result storage
3293 // tvRef2: temporary result storage
3294 // mcode: output MemberCode for the last member if LeaveLast
3295 // curMember: output last member value one if LeaveLast; but undefined
3296 // if the last mcode == MW
3297 template <bool warn
,
3301 unsigned mdepth
, // extra args on stack for set (e.g. rhs)
3302 VMExecutionContext::VectorLeaveCode mleave
>
3303 inline bool OPTBLD_INLINE
VMExecutionContext::setHelperPre(
3304 PC
& pc
, unsigned& ndiscard
, TypedValue
*& base
,
3305 TypedValue
& tvScratch
, TypedValue
& tvLiteral
,
3306 TypedValue
& tvRef
, TypedValue
& tvRef2
,
3307 MemberCode
& mcode
, TypedValue
*& curMember
) {
3308 return memberHelperPre
<true, warn
, define
, unset
,
3309 reffy
, mdepth
, mleave
, false>(MEMBERHELPERPRE_OUT
);
3312 #define SETHELPERPOST_ARGS ndiscard, tvRef, tvRef2
3313 template <unsigned mdepth
>
3314 inline void OPTBLD_INLINE
VMExecutionContext::setHelperPost(
3315 unsigned ndiscard
, Variant
& tvRef
, Variant
& tvRef2
) {
3316 // Clean up the stack. Decref all the elements for the vector, but
3317 // leave the first mdepth (they are not part of the vector data).
3318 for (unsigned depth
= mdepth
; depth
-mdepth
< ndiscard
; ++depth
) {
3319 TypedValue
* tv
= m_stack
.indTV(depth
);
3320 tvRefcountedDecRef(tv
);
3323 // NOTE: currently the only instructions using this that have return
3324 // values on the stack also have more inputs than the -vector, so
3325 // mdepth > 0. They also always return the original top value of
3328 assert(mdepth
== 1 &&
3329 "We don't really support mdepth > 1 in setHelperPost");
3332 TypedValue
* retSrc
= m_stack
.topTV();
3333 TypedValue
* dest
= m_stack
.indTV(ndiscard
+ mdepth
- 1);
3334 assert(dest
!= retSrc
);
3335 memcpy(dest
, retSrc
, sizeof *dest
);
3339 m_stack
.ndiscard(ndiscard
);
3342 inline void OPTBLD_INLINE
VMExecutionContext::iopLowInvalid(PC
& pc
) {
3343 fprintf(stderr
, "invalid bytecode executed\n");
3347 inline void OPTBLD_INLINE
VMExecutionContext::iopNop(PC
& pc
) {
3351 inline void OPTBLD_INLINE
VMExecutionContext::iopPopC(PC
& pc
) {
3356 inline void OPTBLD_INLINE
VMExecutionContext::iopPopV(PC
& pc
) {
3361 inline void OPTBLD_INLINE
VMExecutionContext::iopPopR(PC
& pc
) {
3363 if (m_stack
.topTV()->m_type
!= KindOfRef
) {
3370 inline void OPTBLD_INLINE
VMExecutionContext::iopDup(PC
& pc
) {
3375 inline void OPTBLD_INLINE
VMExecutionContext::iopBox(PC
& pc
) {
3380 inline void OPTBLD_INLINE
VMExecutionContext::iopUnbox(PC
& pc
) {
3385 inline void OPTBLD_INLINE
VMExecutionContext::iopBoxR(PC
& pc
) {
3387 TypedValue
* tv
= m_stack
.topTV();
3388 if (tv
->m_type
!= KindOfRef
) {
3393 inline void OPTBLD_INLINE
VMExecutionContext::iopUnboxR(PC
& pc
) {
3395 if (m_stack
.topTV()->m_type
== KindOfRef
) {
3400 inline void OPTBLD_INLINE
VMExecutionContext::iopNull(PC
& pc
) {
3405 inline void OPTBLD_INLINE
VMExecutionContext::iopNullUninit(PC
& pc
) {
3407 m_stack
.pushNullUninit();
3410 inline void OPTBLD_INLINE
VMExecutionContext::iopTrue(PC
& pc
) {
3415 inline void OPTBLD_INLINE
VMExecutionContext::iopFalse(PC
& pc
) {
3417 m_stack
.pushFalse();
3420 inline void OPTBLD_INLINE
VMExecutionContext::iopFile(PC
& pc
) {
3422 const StringData
* s
= m_fp
->m_func
->unit()->filepath();
3423 m_stack
.pushStaticString(const_cast<StringData
*>(s
));
3426 inline void OPTBLD_INLINE
VMExecutionContext::iopDir(PC
& pc
) {
3428 const StringData
* s
= m_fp
->m_func
->unit()->dirpath();
3429 m_stack
.pushStaticString(const_cast<StringData
*>(s
));
3432 inline void OPTBLD_INLINE
VMExecutionContext::iopInt(PC
& pc
) {
3438 inline void OPTBLD_INLINE
VMExecutionContext::iopDouble(PC
& pc
) {
3441 m_stack
.pushDouble(d
);
3444 inline void OPTBLD_INLINE
VMExecutionContext::iopString(PC
& pc
) {
3447 m_stack
.pushStaticString(s
);
3450 inline void OPTBLD_INLINE
VMExecutionContext::iopArray(PC
& pc
) {
3453 ArrayData
* a
= m_fp
->m_func
->unit()->lookupArrayId(id
);
3454 m_stack
.pushStaticArray(a
);
3457 inline void OPTBLD_INLINE
VMExecutionContext::iopNewArray(PC
& pc
) {
3459 // Clever sizing avoids extra work in HphpArray construction.
3460 auto arr
= ArrayData::Make(size_t(3U) << (HphpArray::MinLgTableSize
-2));
3461 m_stack
.pushArray(arr
);
3464 inline void OPTBLD_INLINE
VMExecutionContext::iopNewTuple(PC
& pc
) {
3467 // This constructor moves values, no inc/decref is necessary.
3468 HphpArray
* arr
= ArrayData::Make(n
, m_stack
.topC());
3469 m_stack
.ndiscard(n
);
3470 m_stack
.pushArray(arr
);
3473 inline void OPTBLD_INLINE
VMExecutionContext::iopAddElemC(PC
& pc
) {
3475 Cell
* c1
= m_stack
.topC();
3476 Cell
* c2
= m_stack
.indC(1);
3477 Cell
* c3
= m_stack
.indC(2);
3478 if (c3
->m_type
!= KindOfArray
) {
3479 raise_error("AddElemC: $3 must be an array");
3481 if (c2
->m_type
== KindOfInt64
) {
3482 tvCellAsVariant(c3
).asArrRef().set(c2
->m_data
.num
, tvAsCVarRef(c1
));
3484 tvCellAsVariant(c3
).asArrRef().set(tvAsCVarRef(c2
), tvAsCVarRef(c1
));
3490 inline void OPTBLD_INLINE
VMExecutionContext::iopAddElemV(PC
& pc
) {
3492 Var
* v1
= m_stack
.topV();
3493 Cell
* c2
= m_stack
.indC(1);
3494 Cell
* c3
= m_stack
.indC(2);
3495 if (c3
->m_type
!= KindOfArray
) {
3496 raise_error("AddElemV: $3 must be an array");
3498 if (c2
->m_type
== KindOfInt64
) {
3499 tvCellAsVariant(c3
).asArrRef().set(c2
->m_data
.num
, ref(tvAsCVarRef(v1
)));
3501 tvCellAsVariant(c3
).asArrRef().set(tvAsCVarRef(c2
), ref(tvAsCVarRef(v1
)));
3507 inline void OPTBLD_INLINE
VMExecutionContext::iopAddNewElemC(PC
& pc
) {
3509 Cell
* c1
= m_stack
.topC();
3510 Cell
* c2
= m_stack
.indC(1);
3511 if (c2
->m_type
!= KindOfArray
) {
3512 raise_error("AddNewElemC: $2 must be an array");
3514 tvCellAsVariant(c2
).asArrRef().append(tvAsCVarRef(c1
));
3518 inline void OPTBLD_INLINE
VMExecutionContext::iopAddNewElemV(PC
& pc
) {
3520 Var
* v1
= m_stack
.topV();
3521 Cell
* c2
= m_stack
.indC(1);
3522 if (c2
->m_type
!= KindOfArray
) {
3523 raise_error("AddNewElemV: $2 must be an array");
3525 tvCellAsVariant(c2
).asArrRef().append(ref(tvAsCVarRef(v1
)));
3529 inline void OPTBLD_INLINE
VMExecutionContext::iopNewCol(PC
& pc
) {
3535 case Collection::VectorType
: obj
= NEWOBJ(c_Vector
)(); break;
3536 case Collection::MapType
: obj
= NEWOBJ(c_Map
)(); break;
3537 case Collection::StableMapType
: obj
= NEWOBJ(c_StableMap
)(); break;
3538 case Collection::SetType
: obj
= NEWOBJ(c_Set
)(); break;
3539 case Collection::PairType
: obj
= NEWOBJ(c_Pair
)(); break;
3542 raise_error("NewCol: Invalid collection type");
3545 // Reserve enough room for nElms elements in advance
3547 collectionReserve(obj
, nElms
);
3549 m_stack
.pushObject(obj
);
3552 inline void OPTBLD_INLINE
VMExecutionContext::iopColAddNewElemC(PC
& pc
) {
3554 Cell
* c1
= m_stack
.topC();
3555 Cell
* c2
= m_stack
.indC(1);
3556 if (c2
->m_type
== KindOfObject
&& c2
->m_data
.pobj
->isCollection()) {
3557 collectionAppend(c2
->m_data
.pobj
, c1
);
3559 raise_error("ColAddNewElemC: $2 must be a collection");
3564 inline void OPTBLD_INLINE
VMExecutionContext::iopColAddElemC(PC
& pc
) {
3566 Cell
* c1
= m_stack
.topC();
3567 Cell
* c2
= m_stack
.indC(1);
3568 Cell
* c3
= m_stack
.indC(2);
3569 if (c3
->m_type
== KindOfObject
&& c3
->m_data
.pobj
->isCollection()) {
3570 collectionSet(c3
->m_data
.pobj
, c2
, c1
);
3572 raise_error("ColAddElemC: $3 must be a collection");
3578 inline void OPTBLD_INLINE
VMExecutionContext::iopCns(PC
& pc
) {
3581 TypedValue
* cns
= Unit::loadCns(s
);
3582 if (cns
== nullptr) {
3583 raise_notice(Strings::UNDEFINED_CONSTANT
, s
->data(), s
->data());
3584 m_stack
.pushStaticString(s
);
3587 Cell
* c1
= m_stack
.allocC();
3588 tvReadCell(cns
, c1
);
3591 inline void OPTBLD_INLINE
VMExecutionContext::iopCnsE(PC
& pc
) {
3594 TypedValue
* cns
= Unit::loadCns(s
);
3595 if (cns
== nullptr) {
3596 raise_error("Undefined constant '%s'", s
->data());
3598 Cell
* c1
= m_stack
.allocC();
3599 tvReadCell(cns
, c1
);
3602 inline void OPTBLD_INLINE
VMExecutionContext::iopCnsU(PC
& pc
) {
3604 DECODE_LITSTR(name
);
3605 DECODE_LITSTR(fallback
);
3606 TypedValue
* cns
= Unit::loadCns(name
);
3607 if (cns
== nullptr) {
3608 cns
= Unit::loadCns(fallback
);
3609 if (cns
== nullptr) {
3611 Strings::UNDEFINED_CONSTANT
,
3615 m_stack
.pushStaticString(fallback
);
3619 Cell
* c1
= m_stack
.allocC();
3620 tvReadCell(cns
, c1
);
3623 inline void OPTBLD_INLINE
VMExecutionContext::iopDefCns(PC
& pc
) {
3626 TypedValue
* tv
= m_stack
.topTV();
3627 tvAsVariant(tv
) = Unit::defCns(s
, tv
);
3630 inline void OPTBLD_INLINE
VMExecutionContext::iopClsCns(PC
& pc
) {
3632 DECODE_LITSTR(clsCnsName
);
3633 TypedValue
* tv
= m_stack
.topTV();
3634 assert(tv
->m_type
== KindOfClass
);
3635 Class
* class_
= tv
->m_data
.pcls
;
3636 assert(class_
!= nullptr);
3637 TypedValue
* clsCns
= class_
->clsCnsGet(clsCnsName
);
3638 if (clsCns
== nullptr) {
3639 raise_error("Couldn't find constant %s::%s",
3640 class_
->name()->data(), clsCnsName
->data());
3642 tvReadCell(clsCns
, tv
);
3645 inline void OPTBLD_INLINE
VMExecutionContext::iopClsCnsD(PC
& pc
) {
3647 DECODE_LITSTR(clsCnsName
);
3648 DECODE(Id
, classId
);
3649 const NamedEntityPair
& classNamedEntity
=
3650 m_fp
->m_func
->unit()->lookupNamedEntityPairId(classId
);
3652 TypedValue
* clsCns
= lookupClsCns(classNamedEntity
.second
,
3653 classNamedEntity
.first
, clsCnsName
);
3654 assert(clsCns
!= nullptr);
3655 Cell
* c1
= m_stack
.allocC();
3656 tvReadCell(clsCns
, c1
);
3659 inline void OPTBLD_INLINE
VMExecutionContext::iopConcat(PC
& pc
) {
3661 Cell
* c1
= m_stack
.topC();
3662 Cell
* c2
= m_stack
.indC(1);
3663 if (IS_STRING_TYPE(c1
->m_type
) && IS_STRING_TYPE(c2
->m_type
)) {
3664 tvCellAsVariant(c2
) = concat(
3665 tvCellAsVariant(c2
).toString(), tvCellAsCVarRef(c1
).toString());
3667 tvCellAsVariant(c2
) = concat(tvCellAsVariant(c2
).toString(),
3668 tvCellAsCVarRef(c1
).toString());
3670 assert(c2
->m_data
.pstr
->getCount() > 0);
3674 inline void OPTBLD_INLINE
VMExecutionContext::iopNot(PC
& pc
) {
3676 Cell
* c1
= m_stack
.topC();
3677 tvCellAsVariant(c1
) = !tvCellAsVariant(c1
).toBoolean();
3681 void OPTBLD_INLINE
VMExecutionContext::implCellBinOp(PC
& pc
, Op op
) {
3683 auto const c1
= m_stack
.topC();
3684 auto const c2
= m_stack
.indC(1);
3685 auto const result
= op(*c2
, *c1
);
3686 tvRefcountedDecRefCell(c2
);
3691 inline void OPTBLD_INLINE
VMExecutionContext::iopAdd(PC
& pc
) {
3692 implCellBinOp(pc
, cellAdd
);
3695 inline void OPTBLD_INLINE
VMExecutionContext::iopSub(PC
& pc
) {
3696 implCellBinOp(pc
, cellSub
);
3699 inline void OPTBLD_INLINE
VMExecutionContext::iopMul(PC
& pc
) {
3700 implCellBinOp(pc
, cellMul
);
3703 inline void OPTBLD_INLINE
VMExecutionContext::iopDiv(PC
& pc
) {
3704 implCellBinOp(pc
, cellDiv
);
3707 inline void OPTBLD_INLINE
VMExecutionContext::iopMod(PC
& pc
) {
3708 implCellBinOp(pc
, cellMod
);
3712 void OPTBLD_INLINE
VMExecutionContext::implCellBinOpBool(PC
& pc
, Op op
) {
3714 auto const c1
= m_stack
.topC();
3715 auto const c2
= m_stack
.indC(1);
3716 bool const result
= op(*c2
, *c1
);
3717 tvRefcountedDecRefCell(c2
);
3718 *c2
= make_tv
<KindOfBoolean
>(result
);
3722 inline void OPTBLD_INLINE
VMExecutionContext::iopXor(PC
& pc
) {
3723 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) -> bool {
3724 return cellToBool(c1
) ^ cellToBool(c2
);
3728 inline void OPTBLD_INLINE
VMExecutionContext::iopSame(PC
& pc
) {
3729 implCellBinOpBool(pc
, cellSame
);
3732 inline void OPTBLD_INLINE
VMExecutionContext::iopNSame(PC
& pc
) {
3733 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) {
3734 return !cellSame(c1
, c2
);
3738 inline void OPTBLD_INLINE
VMExecutionContext::iopEq(PC
& pc
) {
3739 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) {
3740 return cellEqual(c1
, c2
);
3744 inline void OPTBLD_INLINE
VMExecutionContext::iopNeq(PC
& pc
) {
3745 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) {
3746 return !cellEqual(c1
, c2
);
3750 inline void OPTBLD_INLINE
VMExecutionContext::iopLt(PC
& pc
) {
3751 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) {
3752 return cellLess(c1
, c2
);
3756 inline void OPTBLD_INLINE
VMExecutionContext::iopLte(PC
& pc
) {
3757 implCellBinOpBool(pc
, cellLessOrEqual
);
3760 inline void OPTBLD_INLINE
VMExecutionContext::iopGt(PC
& pc
) {
3761 implCellBinOpBool(pc
, [&] (Cell c1
, Cell c2
) {
3762 return cellGreater(c1
, c2
);
3766 inline void OPTBLD_INLINE
VMExecutionContext::iopGte(PC
& pc
) {
3767 implCellBinOpBool(pc
, cellGreaterOrEqual
);
3770 #define MATHOP(OP, VOP) do { \
3772 Cell* c1 = m_stack.topC(); \
3773 Cell* c2 = m_stack.indC(1); \
3774 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
3775 int64_t a = c2->m_data.num; \
3776 int64_t b = c1->m_data.num; \
3777 MATHOP_DIVCHECK(0) \
3778 c2->m_data.num = a OP b; \
3783 tvCellAsVariant(c2) = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
3788 #define MATHOP_DOUBLE(OP)
3789 #define MATHOP_DIVCHECK(x)
3790 inline void OPTBLD_INLINE
VMExecutionContext::iopBitAnd(PC
& pc
) {
3791 MATHOP(&, bitwise_and
);
3794 inline void OPTBLD_INLINE
VMExecutionContext::iopBitOr(PC
& pc
) {
3795 MATHOP(|, bitwise_or
);
3798 inline void OPTBLD_INLINE
VMExecutionContext::iopBitXor(PC
& pc
) {
3799 MATHOP(^, bitwise_xor
);
3802 #undef MATHOP_DOUBLE
3803 #undef MATHOP_DIVCHECK
3805 inline void OPTBLD_INLINE
VMExecutionContext::iopBitNot(PC
& pc
) {
3807 Cell
* c1
= m_stack
.topC();
3808 if (LIKELY(c1
->m_type
== KindOfInt64
)) {
3809 c1
->m_data
.num
= ~c1
->m_data
.num
;
3810 } else if (c1
->m_type
== KindOfDouble
) {
3811 c1
->m_type
= KindOfInt64
;
3812 c1
->m_data
.num
= ~int64_t(c1
->m_data
.dbl
);
3813 } else if (IS_STRING_TYPE(c1
->m_type
)) {
3814 tvCellAsVariant(c1
) = tvCellAsVariant(c1
).bitNot();
3816 raise_error("Unsupported operand type for ~");
3820 #define SHIFTOP(OP) do { \
3822 Cell* c1 = m_stack.topC(); \
3823 Cell* c2 = m_stack.indC(1); \
3824 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
3825 int64_t a = c2->m_data.num; \
3826 int64_t b = c1->m_data.num; \
3827 c2->m_data.num = a OP b; \
3830 tvCellAsVariant(c2) = tvCellAsVariant(c2).toInt64() OP \
3831 tvCellAsCVarRef(c1).toInt64(); \
3835 inline void OPTBLD_INLINE
VMExecutionContext::iopShl(PC
& pc
) {
3839 inline void OPTBLD_INLINE
VMExecutionContext::iopShr(PC
& pc
) {
3844 inline void OPTBLD_INLINE
VMExecutionContext::iopCastBool(PC
& pc
) {
3846 Cell
* c1
= m_stack
.topC();
3847 tvCastToBooleanInPlace(c1
);
3850 inline void OPTBLD_INLINE
VMExecutionContext::iopCastInt(PC
& pc
) {
3852 Cell
* c1
= m_stack
.topC();
3853 tvCastToInt64InPlace(c1
);
3856 inline void OPTBLD_INLINE
VMExecutionContext::iopCastDouble(PC
& pc
) {
3858 Cell
* c1
= m_stack
.topC();
3859 tvCastToDoubleInPlace(c1
);
3862 inline void OPTBLD_INLINE
VMExecutionContext::iopCastString(PC
& pc
) {
3864 Cell
* c1
= m_stack
.topC();
3865 tvCastToStringInPlace(c1
);
3868 inline void OPTBLD_INLINE
VMExecutionContext::iopCastArray(PC
& pc
) {
3870 Cell
* c1
= m_stack
.topC();
3871 tvCastToArrayInPlace(c1
);
3874 inline void OPTBLD_INLINE
VMExecutionContext::iopCastObject(PC
& pc
) {
3876 Cell
* c1
= m_stack
.topC();
3877 tvCastToObjectInPlace(c1
);
3880 inline bool OPTBLD_INLINE
VMExecutionContext::cellInstanceOf(
3881 TypedValue
* tv
, const NamedEntity
* ne
) {
3882 assert(tv
->m_type
!= KindOfRef
);
3883 if (tv
->m_type
== KindOfObject
) {
3884 Class
* cls
= Unit::lookupClass(ne
);
3885 if (cls
) return tv
->m_data
.pobj
->instanceof(cls
);
3886 } else if (tv
->m_type
== KindOfArray
) {
3887 Class
* cls
= Unit::lookupClass(ne
);
3888 if (cls
&& interface_supports_array(cls
->name())) {
3895 inline void OPTBLD_INLINE
VMExecutionContext::iopInstanceOf(PC
& pc
) {
3897 Cell
* c1
= m_stack
.topC(); // c2 instanceof c1
3898 Cell
* c2
= m_stack
.indC(1);
3900 if (IS_STRING_TYPE(c1
->m_type
)) {
3901 const NamedEntity
* rhs
= Unit::GetNamedEntity(c1
->m_data
.pstr
);
3902 r
= cellInstanceOf(c2
, rhs
);
3903 } else if (c1
->m_type
== KindOfObject
) {
3904 if (c2
->m_type
== KindOfObject
) {
3905 ObjectData
* lhs
= c2
->m_data
.pobj
;
3906 ObjectData
* rhs
= c1
->m_data
.pobj
;
3907 r
= lhs
->instanceof(rhs
->getVMClass());
3910 raise_error("Class name must be a valid object or a string");
3913 tvRefcountedDecRefCell(c2
);
3915 c2
->m_type
= KindOfBoolean
;
3918 inline void OPTBLD_INLINE
VMExecutionContext::iopInstanceOfD(PC
& pc
) {
3921 if (shouldProfile()) {
3922 Class::profileInstanceOf(m_fp
->m_func
->unit()->lookupLitstrId(id
));
3924 const NamedEntity
* ne
= m_fp
->m_func
->unit()->lookupNamedEntityId(id
);
3925 Cell
* c1
= m_stack
.topC();
3926 bool r
= cellInstanceOf(c1
, ne
);
3927 tvRefcountedDecRefCell(c1
);
3929 c1
->m_type
= KindOfBoolean
;
3932 inline void OPTBLD_INLINE
VMExecutionContext::iopPrint(PC
& pc
) {
3934 Cell
* c1
= m_stack
.topC();
3935 echo(tvCellAsVariant(c1
).toString());
3936 tvRefcountedDecRefCell(c1
);
3937 c1
->m_type
= KindOfInt64
;
3941 inline void OPTBLD_INLINE
VMExecutionContext::iopClone(PC
& pc
) {
3943 TypedValue
* tv
= m_stack
.topTV();
3944 if (tv
->m_type
!= KindOfObject
) {
3945 raise_error("clone called on non-object");
3947 ObjectData
* obj
= tv
->m_data
.pobj
;
3948 const Class
* class_ UNUSED
= obj
->getVMClass();
3949 ObjectData
* newobj
= obj
->clone();
3952 tv
->m_type
= KindOfObject
;
3953 tv
->m_data
.pobj
= newobj
;
3956 inline void OPTBLD_INLINE
VMExecutionContext::iopExit(PC
& pc
) {
3959 Cell
* c1
= m_stack
.topC();
3960 if (c1
->m_type
== KindOfInt64
) {
3961 exitCode
= c1
->m_data
.num
;
3963 echo(tvCellAsVariant(c1
).toString());
3967 throw ExitException(exitCode
);
3970 inline void OPTBLD_INLINE
VMExecutionContext::iopFatal(PC
& pc
) {
3972 TypedValue
* top
= m_stack
.topTV();
3974 DECODE_IVA(skipFrame
);
3975 if (IS_STRING_TYPE(top
->m_type
)) {
3976 msg
= top
->m_data
.pstr
->data();
3978 msg
= "Fatal error message not a string";
3982 raise_error_without_first_frame(msg
);
3988 inline void OPTBLD_INLINE
VMExecutionContext::jmpSurpriseCheck(Offset offset
) {
3989 if (offset
< 0 && UNLIKELY(Transl::TargetCache::loadConditionFlags())) {
3990 EventHook::CheckSurprise();
3994 inline void OPTBLD_INLINE
VMExecutionContext::iopJmp(PC
& pc
) {
3996 DECODE_JMP(Offset
, offset
);
3997 jmpSurpriseCheck(offset
);
4003 inline void OPTBLD_INLINE
VMExecutionContext::jmpOpImpl(PC
& pc
) {
4004 static_assert(op
== OpJmpZ
|| op
== OpJmpNZ
,
4005 "jmpOpImpl should only be used by JmpZ and JmpNZ");
4007 DECODE_JMP(Offset
, offset
);
4008 jmpSurpriseCheck(offset
);
4010 Cell
* c1
= m_stack
.topC();
4011 if (c1
->m_type
== KindOfInt64
|| c1
->m_type
== KindOfBoolean
) {
4012 int64_t n
= c1
->m_data
.num
;
4013 if (op
== OpJmpZ
? n
== 0 : n
!= 0) {
4017 pc
+= sizeof(Offset
);
4021 auto const condition
= toBoolean(tvCellAsCVarRef(c1
));
4022 if (op
== OpJmpZ
? !condition
: condition
) {
4026 pc
+= sizeof(Offset
);
4032 inline void OPTBLD_INLINE
VMExecutionContext::iopJmpZ(PC
& pc
) {
4033 jmpOpImpl
<OpJmpZ
>(pc
);
4036 inline void OPTBLD_INLINE
VMExecutionContext::iopJmpNZ(PC
& pc
) {
4037 jmpOpImpl
<OpJmpNZ
>(pc
);
4040 #define FREE_ITER_LIST(typeList, idList, vecLen) do { \
4042 for (iterIndex = 0; iterIndex < 2 * veclen; iterIndex += 2) { \
4043 Id iterType = typeList[iterIndex]; \
4044 Id iterId = idList[iterIndex]; \
4046 Iter *iter = frame_iter(m_fp, iterId); \
4048 switch (iterType) { \
4049 case KindOfIter: iter->free(); break; \
4050 case KindOfMIter: iter->mfree(); break; \
4051 case KindOfCIter: iter->cfree(); break; \
4056 inline void OPTBLD_INLINE
VMExecutionContext::iopIterBreak(PC
& pc
) {
4059 DECODE_ITER_LIST(iterTypeList
, iterIdList
, veclen
);
4060 DECODE_JMP(Offset
, offset
);
4062 jmpSurpriseCheck(offset
); // we do this early so iterators are still dirty if
4063 // we have an exception
4065 FREE_ITER_LIST(iterTypeList
, iterIdList
, veclen
);
4066 pc
= savedPc
+ offset
;
4069 #undef FREE_ITER_LIST
// Classifies how a Switch operand maps onto the opcode's jump table.
enum class SwitchMatch {
  NORMAL,  // value was converted to an int: match normally
  NONZERO, // can't be converted to an int: match first nonzero case
  DEFAULT, // can't be converted to an int: match default case
};
4077 static SwitchMatch
doubleCheck(double d
, int64_t& out
) {
4078 if (int64_t(d
) == d
) {
4080 return SwitchMatch::NORMAL
;
4082 return SwitchMatch::DEFAULT
;
4086 inline void OPTBLD_INLINE
VMExecutionContext::iopSwitch(PC
& pc
) {
4089 DECODE(int32_t, veclen
);
4091 Offset
* jmptab
= (Offset
*)pc
;
4092 pc
+= veclen
* sizeof(*jmptab
);
4093 DECODE(int64_t, base
);
4094 DECODE_IVA(bounded
);
4096 TypedValue
* val
= m_stack
.topTV();
4098 assert(val
->m_type
== KindOfInt64
);
4099 // Continuation switch: no bounds checking needed
4100 int64_t label
= val
->m_data
.num
;
4102 assert(label
>= 0 && label
< veclen
);
4103 pc
= origPC
+ jmptab
[label
];
4105 // Generic integer switch
4107 SwitchMatch match
= SwitchMatch::NORMAL
;
4109 switch (val
->m_type
) {
4116 // bool(true) is equal to any non-zero int, bool(false) == 0
4117 if (val
->m_data
.num
) {
4118 match
= SwitchMatch::NONZERO
;
4125 intval
= val
->m_data
.num
;
4129 match
= doubleCheck(val
->m_data
.dbl
, intval
);
4132 case KindOfStaticString
:
4133 case KindOfString
: {
4135 DataType t
= val
->m_data
.pstr
->isNumericWithVal(intval
, dval
, 1);
4142 match
= doubleCheck(dval
, intval
);
4152 tvRefcountedDecRef(val
);
4157 match
= SwitchMatch::DEFAULT
;
4162 intval
= val
->m_data
.pobj
->o_toInt64();
4171 if (match
!= SwitchMatch::NORMAL
||
4172 intval
< base
|| intval
>= (base
+ veclen
- 2)) {
4174 case SwitchMatch::NORMAL
:
4175 case SwitchMatch::DEFAULT
:
4176 pc
= origPC
+ jmptab
[veclen
- 1];
4179 case SwitchMatch::NONZERO
:
4180 pc
= origPC
+ jmptab
[veclen
- 2];
4184 pc
= origPC
+ jmptab
[intval
- base
];
4189 inline void OPTBLD_INLINE
VMExecutionContext::iopSSwitch(PC
& pc
) {
4192 DECODE(int32_t, veclen
);
4194 unsigned cases
= veclen
- 1; // the last vector item is the default case
4195 StrVecItem
* jmptab
= (StrVecItem
*)pc
;
4196 pc
+= veclen
* sizeof(*jmptab
);
4198 Cell
* val
= tvToCell(m_stack
.topTV());
4199 Unit
* u
= m_fp
->m_func
->unit();
4201 for (i
= 0; i
< cases
; ++i
) {
4202 auto& item
= jmptab
[i
];
4203 const StringData
* str
= u
->lookupLitstrId(item
.str
);
4204 if (cellEqual(*val
, str
)) {
4205 pc
= origPC
+ item
.dest
;
4211 pc
= origPC
+ jmptab
[veclen
-1].dest
;
4216 inline void OPTBLD_INLINE
VMExecutionContext::iopRetC(PC
& pc
) {
4218 uint soff
= m_fp
->m_soff
;
4219 assert(!m_fp
->m_func
->isGenerator());
4221 // Call the runtime helpers to free the local variables and iterators
4222 frame_free_locals_inl(m_fp
, m_fp
->m_func
->numLocals());
4223 ActRec
* sfp
= m_fp
->arGetSfp();
4224 // Memcpy the the return value on top of the activation record. This works
4225 // the same regardless of whether the return value is boxed or not.
4226 TypedValue
* retval_ptr
= &m_fp
->m_r
;
4227 memcpy(retval_ptr
, m_stack
.topTV(), sizeof(TypedValue
));
4229 m_stack
.ndiscard(m_fp
->m_func
->numSlotsInFrame() + 1);
4231 if (LIKELY(sfp
!= m_fp
)) {
4232 // Restore caller's execution state.
4234 pc
= m_fp
->m_func
->unit()->entry() + m_fp
->m_func
->base() + soff
;
4236 assert(m_stack
.topTV() == retval_ptr
);
4238 // No caller; terminate.
4242 std::ostringstream os
;
4243 os
<< toStringElm(m_stack
.topTV());
4245 Trace::trace("Return %s from VMExecutionContext::dispatch("
4246 "%p)\n", os
.str().c_str(), m_fp
));
4253 inline void OPTBLD_INLINE
VMExecutionContext::iopRetV(PC
& pc
) {
4257 inline void OPTBLD_INLINE
VMExecutionContext::iopUnwind(PC
& pc
) {
4258 assert(!m_faults
.empty());
4259 assert(m_faults
.back().m_savedRaiseOffset
!= kInvalidOffset
);
4260 throw VMPrepareUnwind();
4263 inline void OPTBLD_INLINE
VMExecutionContext::iopThrow(PC
& pc
) {
4264 Cell
* c1
= m_stack
.topC();
4265 if (c1
->m_type
!= KindOfObject
||
4266 !c1
->m_data
.pobj
->instanceof(SystemLib::s_ExceptionClass
)) {
4267 raise_error("Exceptions must be valid objects derived from the "
4268 "Exception base class");
4271 Object
obj(c1
->m_data
.pobj
);
4273 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj
.get()));
4277 inline void OPTBLD_INLINE
VMExecutionContext::iopAGetC(PC
& pc
) {
4279 TypedValue
* tv
= m_stack
.topTV();
4280 lookupClsRef(tv
, tv
, true);
4283 inline void OPTBLD_INLINE
VMExecutionContext::iopAGetL(PC
& pc
) {
4286 TypedValue
* top
= m_stack
.allocTV();
4287 TypedValue
* fr
= frame_local_inner(m_fp
, local
);
4288 lookupClsRef(fr
, top
);
4291 static void raise_undefined_local(ActRec
* fp
, Id pind
) {
4292 assert(pind
< fp
->m_func
->numNamedLocals());
4293 raise_notice(Strings::UNDEFINED_VARIABLE
,
4294 fp
->m_func
->localVarName(pind
)->data());
4297 static inline void cgetl_inner_body(TypedValue
* fr
, TypedValue
* to
) {
4298 assert(fr
->m_type
!= KindOfUninit
);
4300 if (to
->m_type
== KindOfRef
) {
4305 static inline void cgetl_body(ActRec
* fp
,
4309 if (fr
->m_type
== KindOfUninit
) {
4310 // `to' is uninitialized here, so we need to tvWriteNull before
4311 // possibly causing stack unwinding.
4313 raise_undefined_local(fp
, pind
);
4315 cgetl_inner_body(fr
, to
);
4319 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL(PC
& pc
) {
4322 Cell
* to
= m_stack
.allocC();
4323 TypedValue
* fr
= frame_local(m_fp
, local
);
4324 cgetl_body(m_fp
, fr
, to
, local
);
4327 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL2(PC
& pc
) {
4330 TypedValue
* oldTop
= m_stack
.topTV();
4331 TypedValue
* newTop
= m_stack
.allocTV();
4332 memcpy(newTop
, oldTop
, sizeof *newTop
);
4334 TypedValue
* fr
= frame_local(m_fp
, local
);
4335 cgetl_body(m_fp
, fr
, to
, local
);
4338 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL3(PC
& pc
) {
4341 TypedValue
* oldTop
= m_stack
.topTV();
4342 TypedValue
* oldSubTop
= m_stack
.indTV(1);
4343 TypedValue
* newTop
= m_stack
.allocTV();
4344 memmove(newTop
, oldTop
, sizeof *oldTop
* 2);
4345 Cell
* to
= oldSubTop
;
4346 TypedValue
* fr
= frame_local(m_fp
, local
);
4347 cgetl_body(m_fp
, fr
, to
, local
);
4350 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetN(PC
& pc
) {
4353 TypedValue
* to
= m_stack
.topTV();
4354 TypedValue
* fr
= nullptr;
4355 lookup_var(m_fp
, name
, to
, fr
);
4356 if (fr
== nullptr || fr
->m_type
== KindOfUninit
) {
4357 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4358 tvRefcountedDecRefCell(to
);
4361 tvRefcountedDecRefCell(to
);
4362 cgetl_inner_body(fr
, to
);
4364 decRefStr(name
); // TODO(#1146727): leaks during exceptions
4367 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetG(PC
& pc
) {
4370 TypedValue
* to
= m_stack
.topTV();
4371 TypedValue
* fr
= nullptr;
4372 lookup_gbl(m_fp
, name
, to
, fr
);
4373 if (fr
== nullptr) {
4375 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4377 tvRefcountedDecRefCell(to
);
4379 } else if (fr
->m_type
== KindOfUninit
) {
4380 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4381 tvRefcountedDecRefCell(to
);
4384 tvRefcountedDecRefCell(to
);
4385 cgetl_inner_body(fr
, to
);
4387 decRefStr(name
); // TODO(#1146727): leaks during exceptions
4390 #define SPROP_OP_PRELUDE \
4392 TypedValue* clsref = m_stack.topTV(); \
4393 TypedValue* nameCell = m_stack.indTV(1); \
4394 TypedValue* output = nameCell; \
4396 bool visible, accessible; \
4397 lookup_sprop(m_fp, clsref, name, nameCell, val, visible, \
4400 #define SPROP_OP_POSTLUDE \
4403 #define GETS(box) do { \
4405 if (!(visible && accessible)) { \
4406 raise_error("Invalid static property access: %s::%s", \
4407 clsref->m_data.pcls->name()->data(), \
4411 if (val->m_type != KindOfRef) { \
4414 varDup(*val, *output); \
4416 tvReadCell(val, output); \
4422 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetS(PC
& pc
) {
4425 if (shouldProfile() && name
&& name
->isStatic()) {
4426 recordType(TypeProfileKey(TypeProfileKey::StaticPropName
, name
),
4427 m_stack
.top()->m_type
);
4431 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetM(PC
& pc
) {
4434 DECLARE_GETHELPER_ARGS
4435 getHelper(GETHELPER_ARGS
);
4436 if (tvRet
->m_type
== KindOfRef
) {
4439 assert(hasImmVector(toOp(*oldPC
)));
4440 const ImmVector
& immVec
= ImmVector::createFromStream(oldPC
+ 1);
4443 if (immVec
.decodeLastMember(curUnit(), name
, mc
)) {
4444 recordType(TypeProfileKey(mc
, name
), m_stack
.top()->m_type
);
4448 static inline void vgetl_body(TypedValue
* fr
, TypedValue
* to
) {
4449 if (fr
->m_type
!= KindOfRef
) {
4455 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetL(PC
& pc
) {
4458 Var
* to
= m_stack
.allocV();
4459 TypedValue
* fr
= frame_local(m_fp
, local
);
4463 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetN(PC
& pc
) {
4466 TypedValue
* to
= m_stack
.topTV();
4467 TypedValue
* fr
= nullptr;
4468 lookupd_var(m_fp
, name
, to
, fr
);
4469 assert(fr
!= nullptr);
4470 tvRefcountedDecRefCell(to
);
4475 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetG(PC
& pc
) {
4478 TypedValue
* to
= m_stack
.topTV();
4479 TypedValue
* fr
= nullptr;
4480 lookupd_gbl(m_fp
, name
, to
, fr
);
4481 assert(fr
!= nullptr);
4482 tvRefcountedDecRefCell(to
);
4487 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetS(PC
& pc
) {
4493 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetM(PC
& pc
) {
4495 DECLARE_SETHELPER_ARGS
4496 TypedValue
* tv1
= m_stack
.allocTV();
4498 if (!setHelperPre
<false, true, false, true, 1,
4499 VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
4500 if (base
->m_type
!= KindOfRef
) {
4503 varDup(*base
, *tv1
);
4508 setHelperPost
<1>(SETHELPERPOST_ARGS
);
4511 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetN(PC
& pc
) {
4514 TypedValue
* tv1
= m_stack
.topTV();
4515 TypedValue
* tv
= nullptr;
4517 lookup_var(m_fp
, name
, tv1
, tv
);
4518 if (tv
== nullptr) {
4521 e
= isset(tvAsCVarRef(tv
));
4523 tvRefcountedDecRefCell(tv1
);
4524 tv1
->m_data
.num
= e
;
4525 tv1
->m_type
= KindOfBoolean
;
4529 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetG(PC
& pc
) {
4532 TypedValue
* tv1
= m_stack
.topTV();
4533 TypedValue
* tv
= nullptr;
4535 lookup_gbl(m_fp
, name
, tv1
, tv
);
4536 if (tv
== nullptr) {
4539 e
= isset(tvAsCVarRef(tv
));
4541 tvRefcountedDecRefCell(tv1
);
4542 tv1
->m_data
.num
= e
;
4543 tv1
->m_type
= KindOfBoolean
;
4547 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetS(PC
& pc
) {
4551 if (!(visible
&& accessible
)) {
4554 e
= isset(tvAsCVarRef(val
));
4557 output
->m_data
.num
= e
;
4558 output
->m_type
= KindOfBoolean
;
4562 template <bool isEmpty
>
4563 inline void OPTBLD_INLINE
VMExecutionContext::isSetEmptyM(PC
& pc
) {
4565 DECLARE_GETHELPER_ARGS
4566 getHelperPre
<false, false, VectorLeaveCode::LeaveLast
>(MEMBERHELPERPRE_ARGS
);
4567 // Process last member specially, in order to employ the IssetElem/IssetProp
4569 bool isSetEmptyResult
= false;
4575 isSetEmptyResult
= IssetEmptyElem
<isEmpty
>(tvScratch
, *tvRef
.asTypedValue(),
4582 Class
* ctx
= arGetContextClass(m_fp
);
4583 isSetEmptyResult
= IssetEmptyProp
<isEmpty
>(ctx
, base
, curMember
);
4586 default: assert(false);
4588 getHelperPost
<false>(GETHELPERPOST_ARGS
);
4589 tvRet
->m_data
.num
= isSetEmptyResult
;
4590 tvRet
->m_type
= KindOfBoolean
;
4593 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetM(PC
& pc
) {
4594 isSetEmptyM
<false>(pc
);
4597 #define IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4598 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## L(PC& pc) { \
4601 TypedValue* tv = frame_local(m_fp, local); \
4602 if (checkInit && tv->m_type == KindOfUninit) { \
4603 raise_undefined_local(m_fp, local); \
4605 bool ret = predicate(tvAsCVarRef(tv)); \
4606 TypedValue* topTv = m_stack.allocTV(); \
4607 topTv->m_data.num = ret; \
4608 topTv->m_type = KindOfBoolean; \
4611 #define IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
4612 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## C(PC& pc) { \
4614 TypedValue* topTv = m_stack.topTV(); \
4615 assert(topTv->m_type != KindOfRef); \
4616 bool ret = predicate(tvAsCVarRef(topTv)); \
4617 tvRefcountedDecRefCell(topTv); \
4618 topTv->m_data.num = ret; \
4619 topTv->m_type = KindOfBoolean; \
4622 #define IOP_TYPE_CHECK_INSTR(checkInit, what, predicate) \
4623 IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4624 IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
4626 IOP_TYPE_CHECK_INSTR_L(false, set, isset)
4627 IOP_TYPE_CHECK_INSTR(true, Null
, is_null
)
4628 IOP_TYPE_CHECK_INSTR(true, Array
, is_array
)
4629 IOP_TYPE_CHECK_INSTR(true, String
, is_string
)
4630 IOP_TYPE_CHECK_INSTR(true, Object
, is_object
)
4631 IOP_TYPE_CHECK_INSTR(true, Int
, is_int
)
4632 IOP_TYPE_CHECK_INSTR(true, Double
, is_double
)
4633 IOP_TYPE_CHECK_INSTR(true, Bool
, is_bool
)
4634 #undef IOP_TYPE_CHECK_INSTR
4636 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyL(PC
& pc
) {
4639 TypedValue
* loc
= frame_local(m_fp
, local
);
4640 bool e
= empty(tvAsCVarRef(loc
));
4641 TypedValue
* tv1
= m_stack
.allocTV();
4642 tv1
->m_data
.num
= e
;
4643 tv1
->m_type
= KindOfBoolean
;
4646 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyN(PC
& pc
) {
4649 TypedValue
* tv1
= m_stack
.topTV();
4650 TypedValue
* tv
= nullptr;
4652 lookup_var(m_fp
, name
, tv1
, tv
);
4653 if (tv
== nullptr) {
4656 e
= empty(tvAsCVarRef(tv
));
4658 tvRefcountedDecRefCell(tv1
);
4659 tv1
->m_data
.num
= e
;
4660 tv1
->m_type
= KindOfBoolean
;
4664 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyG(PC
& pc
) {
4667 TypedValue
* tv1
= m_stack
.topTV();
4668 TypedValue
* tv
= nullptr;
4670 lookup_gbl(m_fp
, name
, tv1
, tv
);
4671 if (tv
== nullptr) {
4674 e
= empty(tvAsCVarRef(tv
));
4676 tvRefcountedDecRefCell(tv1
);
4677 tv1
->m_data
.num
= e
;
4678 tv1
->m_type
= KindOfBoolean
;
4682 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyS(PC
& pc
) {
4686 if (!(visible
&& accessible
)) {
4689 e
= empty(tvAsCVarRef(val
));
4692 output
->m_data
.num
= e
;
4693 output
->m_type
= KindOfBoolean
;
4697 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyM(PC
& pc
) {
4698 isSetEmptyM
<true>(pc
);
4701 inline void OPTBLD_INLINE
VMExecutionContext::iopAKExists(PC
& pc
) {
4703 TypedValue
* arr
= m_stack
.topTV();
4704 TypedValue
* key
= arr
+ 1;
4705 bool result
= f_array_key_exists(tvAsCVarRef(key
), tvAsCVarRef(arr
));
4707 tvRefcountedDecRef(key
);
4708 key
->m_data
.num
= result
;
4709 key
->m_type
= KindOfBoolean
;
4712 inline void OPTBLD_INLINE
VMExecutionContext::iopArrayIdx(PC
& pc
) {
4714 TypedValue
* def
= m_stack
.topTV();
4715 TypedValue
* arr
= m_stack
.indTV(1);
4716 TypedValue
* key
= m_stack
.indTV(2);
4718 Variant result
= f_hphp_array_idx(tvAsCVarRef(key
),
4723 tvAsVariant(key
) = result
;
4726 inline void OPTBLD_INLINE
VMExecutionContext::iopSetL(PC
& pc
) {
4729 assert(local
< m_fp
->m_func
->numLocals());
4730 Cell
* fr
= m_stack
.topC();
4731 TypedValue
* to
= frame_local(m_fp
, local
);
4735 inline void OPTBLD_INLINE
VMExecutionContext::iopSetN(PC
& pc
) {
4738 Cell
* fr
= m_stack
.topC();
4739 TypedValue
* tv2
= m_stack
.indTV(1);
4740 TypedValue
* to
= nullptr;
4741 lookupd_var(m_fp
, name
, tv2
, to
);
4742 assert(to
!= nullptr);
4744 memcpy((void*)tv2
, (void*)fr
, sizeof(TypedValue
));
4749 inline void OPTBLD_INLINE
VMExecutionContext::iopSetG(PC
& pc
) {
4752 Cell
* fr
= m_stack
.topC();
4753 TypedValue
* tv2
= m_stack
.indTV(1);
4754 TypedValue
* to
= nullptr;
4755 lookupd_gbl(m_fp
, name
, tv2
, to
);
4756 assert(to
!= nullptr);
4758 memcpy((void*)tv2
, (void*)fr
, sizeof(TypedValue
));
4763 inline void OPTBLD_INLINE
VMExecutionContext::iopSetS(PC
& pc
) {
4765 TypedValue
* tv1
= m_stack
.topTV();
4766 TypedValue
* classref
= m_stack
.indTV(1);
4767 TypedValue
* propn
= m_stack
.indTV(2);
4768 TypedValue
* output
= propn
;
4771 bool visible
, accessible
;
4772 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
4773 if (!(visible
&& accessible
)) {
4774 raise_error("Invalid static property access: %s::%s",
4775 classref
->m_data
.pcls
->name()->data(),
4779 tvRefcountedDecRefCell(propn
);
4780 memcpy(output
, tv1
, sizeof(TypedValue
));
4781 m_stack
.ndiscard(2);
4785 inline void OPTBLD_INLINE
VMExecutionContext::iopSetM(PC
& pc
) {
4787 DECLARE_SETHELPER_ARGS
4788 if (!setHelperPre
<false, true, false, false, 1,
4789 VectorLeaveCode::LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
4790 Cell
* c1
= m_stack
.topC();
4793 SetNewElem
<true>(base
, c1
);
4800 StringData
* result
= SetElem
<true>(base
, curMember
, c1
);
4802 tvRefcountedDecRefCell(c1
);
4803 c1
->m_type
= KindOfString
;
4804 c1
->m_data
.pstr
= result
;
4811 Class
* ctx
= arGetContextClass(m_fp
);
4812 SetProp
<true>(ctx
, base
, curMember
, c1
);
4815 default: assert(false);
4819 setHelperPost
<1>(SETHELPERPOST_ARGS
);
4822 inline void OPTBLD_INLINE
VMExecutionContext::iopSetWithRefLM(PC
& pc
) {
4824 DECLARE_SETHELPER_ARGS
4825 bool skip
= setHelperPre
<false, true, false, false, 0,
4826 VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
4829 TypedValue
* from
= frame_local(m_fp
, local
);
4830 tvAsVariant(base
) = withRefBind(tvAsVariant(from
));
4832 setHelperPost
<0>(SETHELPERPOST_ARGS
);
4835 inline void OPTBLD_INLINE
VMExecutionContext::iopSetWithRefRM(PC
& pc
) {
4837 DECLARE_SETHELPER_ARGS
4838 bool skip
= setHelperPre
<false, true, false, false, 1,
4839 VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
4841 TypedValue
* from
= m_stack
.top();
4842 tvAsVariant(base
) = withRefBind(tvAsVariant(from
));
4844 setHelperPost
<0>(SETHELPERPOST_ARGS
);
4848 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpL(PC
& pc
) {
4851 DECODE(unsigned char, op
);
4852 Cell
* fr
= m_stack
.topC();
4853 TypedValue
* to
= frame_local(m_fp
, local
);
4854 SETOP_BODY(to
, op
, fr
);
4855 tvRefcountedDecRefCell(fr
);
// iopSetOpN: compound assignment on a variable addressed by name.
// lookupd_var finds (or defines) the named local `to`; SETOP_BODY then
// applies the sub-opcode with the stack-top cell as RHS, and the final
// tvReadCell publishes the result into the name's stack slot.
// NOTE(review): the lookup/decode of `name` (orig line 4862) and the
// trailing stack discard / closing brace were elided from this
// extraction.
4859 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpN(PC
& pc
) {
4861 DECODE(unsigned char, op
);
4863 Cell
* fr
= m_stack
.topC();
4864 TypedValue
* tv2
= m_stack
.indTV(1);
4865 TypedValue
* to
= nullptr;
4866 // XXX We're probably not getting warnings totally correct here
4867 lookupd_var(m_fp
, name
, tv2
, to
);
4868 assert(to
!= nullptr);
4869 SETOP_BODY(to
, op
, fr
);
// Release the RHS and the (consumed) name cell, then copy the result
// into the slot previously holding the name.
4870 tvRefcountedDecRef(fr
);
4871 tvRefcountedDecRef(tv2
);
4872 tvReadCell(to
, tv2
);
// iopSetOpG: compound assignment on a global variable; identical shape
// to iopSetOpN but resolves the name through the global environment via
// lookupd_gbl (defining the global if absent).
// NOTE(review): the lookup/decode of `name` (orig line 4880) and the
// trailing stack discard / closing brace were elided from this
// extraction.
4877 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpG(PC
& pc
) {
4879 DECODE(unsigned char, op
);
4881 Cell
* fr
= m_stack
.topC();
4882 TypedValue
* tv2
= m_stack
.indTV(1);
4883 TypedValue
* to
= nullptr;
4884 // XXX We're probably not getting warnings totally correct here
4885 lookupd_gbl(m_fp
, name
, tv2
, to
);
4886 assert(to
!= nullptr);
4887 SETOP_BODY(to
, op
, fr
);
// Release RHS and name cell; publish the result where the name was.
4888 tvRefcountedDecRef(fr
);
4889 tvRefcountedDecRef(tv2
);
4890 tvReadCell(to
, tv2
);
4895 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpS(PC
& pc
) {
4897 DECODE(unsigned char, op
);
4898 Cell
* fr
= m_stack
.topC();
4899 TypedValue
* classref
= m_stack
.indTV(1);
4900 TypedValue
* propn
= m_stack
.indTV(2);
4901 TypedValue
* output
= propn
;
4904 bool visible
, accessible
;
4905 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
4906 if (!(visible
&& accessible
)) {
4907 raise_error("Invalid static property access: %s::%s",
4908 classref
->m_data
.pcls
->name()->data(),
4911 SETOP_BODY(val
, op
, fr
);
4912 tvRefcountedDecRefCell(propn
);
4913 tvRefcountedDecRef(fr
);
4914 tvReadCell(val
, output
);
4915 m_stack
.ndiscard(2);
4919 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpM(PC
& pc
) {
4921 DECODE(unsigned char, op
);
4922 DECLARE_SETHELPER_ARGS
4923 if (!setHelperPre
<MoreWarnings
, true, false, false, 1,
4924 VectorLeaveCode::LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
4926 Cell
* rhs
= m_stack
.topC();
4929 result
= SetOpNewElem(tvScratch
, *tvRef
.asTypedValue(), op
, base
, rhs
);
4936 result
= SetOpElem(tvScratch
, *tvRef
.asTypedValue(), op
, base
,
4942 Class
*ctx
= arGetContextClass(m_fp
);
4943 result
= SetOpProp(tvScratch
, *tvRef
.asTypedValue(), ctx
, op
, base
,
4949 result
= nullptr; // Silence compiler warning.
4953 tvRefcountedDecRef(rhs
);
4954 tvReadCell(result
, rhs
);
4956 setHelperPost
<1>(SETHELPERPOST_ARGS
);
// iopIncDecL: pre/post increment/decrement of a frame local. Allocates
// a fresh stack cell `to` to receive the expression's result and lets
// IncDecBody<true> apply the decoded sub-opcode to the local `fr`.
// NOTE(review): the decode of `local` (orig line 4964) and the closing
// brace were elided from this extraction.
4959 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecL(PC
& pc
) {
4962 DECODE(unsigned char, op
);
4963 TypedValue
* to
= m_stack
.allocTV();
4965 TypedValue
* fr
= frame_local(m_fp
, local
);
4966 IncDecBody
<true>(op
, fr
, to
);
// iopIncDecN: pre/post inc/dec of a variable addressed by name.
// lookupd_var resolves (or defines) the named variable; the result is
// written over the name cell on the stack by IncDecBody.
// NOTE(review): the decode/lookup of `name` (orig line 4972) and the
// closing brace were elided from this extraction.
4969 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecN(PC
& pc
) {
4971 DECODE(unsigned char, op
);
4973 TypedValue
* nameCell
= m_stack
.topTV();
4974 TypedValue
* local
= nullptr;
4975 // XXX We're probably not getting warnings totally correct here
4976 lookupd_var(m_fp
, name
, nameCell
, local
);
4977 assert(local
!= nullptr);
// Result lands in nameCell, replacing the consumed name on the stack.
4978 IncDecBody
<true>(op
, local
, nameCell
);
// iopIncDecG: pre/post inc/dec of a global variable; same shape as
// iopIncDecN but resolves through lookupd_gbl (defining if absent).
// NOTE(review): the decode/lookup of `name` (orig line 4985) and the
// closing brace were elided from this extraction.
4982 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecG(PC
& pc
) {
4984 DECODE(unsigned char, op
);
4986 TypedValue
* nameCell
= m_stack
.topTV();
4987 TypedValue
* gbl
= nullptr;
4988 // XXX We're probably not getting warnings totally correct here
4989 lookupd_gbl(m_fp
, name
, nameCell
, gbl
);
4990 assert(gbl
!= nullptr);
// Result lands in nameCell, replacing the consumed name on the stack.
4991 IncDecBody
<true>(op
, gbl
, nameCell
);
4995 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecS(PC
& pc
) {
4998 DECODE(unsigned char, op
);
4999 if (!(visible
&& accessible
)) {
5000 raise_error("Invalid static property access: %s::%s",
5001 clsref
->m_data
.pcls
->name()->data(),
5004 tvRefcountedDecRefCell(nameCell
);
5005 IncDecBody
<true>(op
, val
, output
);
5010 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecM(PC
& pc
) {
5012 DECODE(unsigned char, op
);
5013 DECLARE_SETHELPER_ARGS
5016 if (!setHelperPre
<MoreWarnings
, true, false, false, 0,
5017 VectorLeaveCode::LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5019 IncDecNewElem
<true>(tvScratch
, *tvRef
.asTypedValue(), op
, base
, to
);
5026 IncDecElem
<true>(tvScratch
, *tvRef
.asTypedValue(), op
, base
,
5032 Class
* ctx
= arGetContextClass(m_fp
);
5033 IncDecProp
<true>(tvScratch
, *tvRef
.asTypedValue(), ctx
, op
, base
,
5037 default: assert(false);
5041 setHelperPost
<0>(SETHELPERPOST_ARGS
);
5042 Cell
* c1
= m_stack
.allocC();
5043 memcpy(c1
, &to
, sizeof(TypedValue
));
// iopBindL: binds a frame local to the reference (Var) on the stack
// top, making the local an alias of that reference.
// NOTE(review): the decode of `local` (orig lines 5047-5048), the
// actual bind of `fr` into `to` (5051+), and the closing brace were
// elided from this extraction.
5046 inline void OPTBLD_INLINE
VMExecutionContext::iopBindL(PC
& pc
) {
5049 Var
* fr
= m_stack
.topV();
5050 TypedValue
* to
= frame_local(m_fp
, local
);
// iopBindN: binds a variable addressed by name to the reference on the
// stack top. lookupd_var resolves (or defines) the named variable `to`;
// the final memcpy overwrites the consumed name slot with the bound
// reference value.
// NOTE(review): the decode/lookup of `name` (orig line 5056), the bind
// of `fr` into `to` (5062), and the trailing discard / closing brace
// were elided from this extraction.
5054 inline void OPTBLD_INLINE
VMExecutionContext::iopBindN(PC
& pc
) {
5057 TypedValue
* fr
= m_stack
.topTV();
5058 TypedValue
* nameTV
= m_stack
.indTV(1);
5059 TypedValue
* to
= nullptr;
5060 lookupd_var(m_fp
, name
, nameTV
, to
);
5061 assert(to
!= nullptr);
// Raw copy: the reference's refcount bookkeeping is presumably handled
// by the elided bind above -- confirm against the canonical source.
5063 memcpy((void*)nameTV
, (void*)fr
, sizeof(TypedValue
));
// iopBindG: binds a global variable (by name) to the reference on the
// stack top; same shape as iopBindN but resolves via lookupd_gbl.
// NOTE(review): the decode/lookup of `name` (orig line 5070), the bind
// of `fr` into `to` (5076), and the trailing discard / closing brace
// were elided from this extraction.
5068 inline void OPTBLD_INLINE
VMExecutionContext::iopBindG(PC
& pc
) {
5071 TypedValue
* fr
= m_stack
.topTV();
5072 TypedValue
* nameTV
= m_stack
.indTV(1);
5073 TypedValue
* to
= nullptr;
5074 lookupd_gbl(m_fp
, name
, nameTV
, to
);
5075 assert(to
!= nullptr);
// Raw copy over the consumed name slot; see note in iopBindN.
5077 memcpy((void*)nameTV
, (void*)fr
, sizeof(TypedValue
));
5082 inline void OPTBLD_INLINE
VMExecutionContext::iopBindS(PC
& pc
) {
5084 TypedValue
* fr
= m_stack
.topTV();
5085 TypedValue
* classref
= m_stack
.indTV(1);
5086 TypedValue
* propn
= m_stack
.indTV(2);
5087 TypedValue
* output
= propn
;
5090 bool visible
, accessible
;
5091 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
5092 if (!(visible
&& accessible
)) {
5093 raise_error("Invalid static property access: %s::%s",
5094 classref
->m_data
.pcls
->name()->data(),
5098 tvRefcountedDecRefCell(propn
);
5099 memcpy(output
, fr
, sizeof(TypedValue
));
5100 m_stack
.ndiscard(2);
5104 inline void OPTBLD_INLINE
VMExecutionContext::iopBindM(PC
& pc
) {
5106 DECLARE_SETHELPER_ARGS
5107 TypedValue
* tv1
= m_stack
.topTV();
5108 if (!setHelperPre
<false, true, false, true, 1,
5109 VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
5110 // Bind the element/property with the var on the top of the stack
5113 setHelperPost
<1>(SETHELPERPOST_ARGS
);
// iopUnsetL: unsets a frame local: dec-refs its current value. The
// assert guards against an out-of-range local index from the decoder.
// NOTE(review): the decode of `local` (orig lines 5117-5118), the
// write of Uninit into the slot (5122), and the closing brace were
// elided from this extraction.
5116 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetL(PC
& pc
) {
5119 assert(local
< m_fp
->m_func
->numLocals());
5120 TypedValue
* tv
= frame_local(m_fp
, local
);
5121 tvRefcountedDecRef(tv
);
// iopUnsetN: unsets a variable addressed by name. lookup_var resolves
// the name (without defining it); if the variable exists, its value is
// dec-ref'd. A frame with an invoke-name must never reach here.
// NOTE(review): the decode/lookup of `name` (orig line 5127), the
// Uninit write, name-cell cleanup, stack pop, and closing braces were
// elided from this extraction.
5125 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetN(PC
& pc
) {
5128 TypedValue
* tv1
= m_stack
.topTV();
5129 TypedValue
* tv
= nullptr;
5130 lookup_var(m_fp
, name
, tv1
, tv
);
5131 assert(!m_fp
->hasInvName());
5132 if (tv
!= nullptr) {
5133 tvRefcountedDecRef(tv
);
// iopUnsetG: unsets a global variable. Extracts the name from the
// stack-top cell and asks the global VarEnv to remove the binding.
// NOTE(review): orig line 5141 and the trailing cleanup (dec-ref of
// the name, stack pop, closing brace, 5147+) were elided from this
// extraction.
5140 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetG(PC
& pc
) {
5142 TypedValue
* tv1
= m_stack
.topTV();
5143 StringData
* name
= lookup_name(tv1
);
5144 VarEnv
* varEnv
= m_globalVarEnv
;
5145 assert(varEnv
!= nullptr);
5146 varEnv
->unset(name
);
// iopUnsetM: unsets an element or property reached through a member
// vector. setHelperPre resolves the base up to the last member
// (LeaveLast); the elided switch on the final member code then
// dispatches to UnsetElem (array/collection element) or UnsetProp
// (object property, in the calling class's context).
// NOTE(review): the switch header, case labels, intermediate braces
// and the function's closing brace were elided from this extraction
// (orig numbering jumps 5155->5161->5166->5170->5173).
5151 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetM(PC
& pc
) {
5153 DECLARE_SETHELPER_ARGS
5154 if (!setHelperPre
<false, false, true, false, 0,
5155 VectorLeaveCode::LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5161 UnsetElem(base
, curMember
);
5166 Class
* ctx
= arGetContextClass(m_fp
);
5167 UnsetProp(ctx
, base
, curMember
);
5170 default: assert(false);
5173 setHelperPost
<0>(SETHELPERPOST_ARGS
);
// fPushFuncImpl: shared helper for the FPushFunc* handlers. Allocates
// a new ActRec on the stack, records the argument count, and clears
// the VarEnv; also honors an optional debugger breakpoint on the
// function's name.
// NOTE(review): the parameter list (orig lines 5177-5178), the
// assignment of `func` into the ActRec (5181-5182), and the return of
// `ar` were elided from this extraction.
5176 inline ActRec
* OPTBLD_INLINE
VMExecutionContext::fPushFuncImpl(
5179 DEBUGGER_IF(phpBreakpointEnabled(func
->name()->data()));
5180 ActRec
* ar
= m_stack
.allocA();
5183 ar
->initNumArgs(numArgs
);
5184 ar
->setVarEnv(nullptr);
5188 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFunc(PC
& pc
) {
5190 DECODE_IVA(numArgs
);
5191 Cell
* c1
= m_stack
.topC();
5192 const Func
* func
= nullptr;
5193 ObjectData
* origObj
= nullptr;
5194 StringData
* origSd
= nullptr;
5195 if (IS_STRING_TYPE(c1
->m_type
)) {
5196 origSd
= c1
->m_data
.pstr
;
5197 func
= Unit::loadFunc(origSd
);
5198 } else if (c1
->m_type
== KindOfObject
) {
5199 static StringData
* invokeName
= StringData::GetStaticString("__invoke");
5200 origObj
= c1
->m_data
.pobj
;
5201 const Class
* cls
= origObj
->getVMClass();
5202 func
= cls
->lookupMethod(invokeName
);
5203 if (func
== nullptr) {
5204 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5207 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5209 if (func
== nullptr) {
5210 raise_error("Call to undefined function %s()", c1
->m_data
.pstr
->data());
5212 assert(!origObj
|| !origSd
);
5213 assert(origObj
|| origSd
);
5214 // We've already saved origObj or origSd; we'll use them after
5215 // overwriting the pointer on the stack. Don't refcount it now; defer
5216 // till after we're done with it.
5218 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5220 if (func
->attrs() & AttrStatic
&& !func
->isClosureBody()) {
5221 ar
->setClass(origObj
->getVMClass());
5224 ar
->setThis(origObj
);
5225 // Teleport the reference from the destroyed stack cell to the
5226 // ActRec. Don't try this at home.
5229 ar
->setThis(nullptr);
// iopFPushFuncD: pushes an ActRec for a call to a function known at
// compile time ("D" = direct). Resolves the function's NamedEntity from
// the current unit, loads it, raises a fatal error if undefined, and
// sets up a this-less ActRec via fPushFuncImpl.
// NOTE(review): the decode of `id` (orig line 5237) and the error
// branch's / function's closing braces were elided from this
// extraction.
5234 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFuncD(PC
& pc
) {
5236 DECODE_IVA(numArgs
);
5238 const NamedEntityPair nep
= m_fp
->m_func
->unit()->lookupNamedEntityPairId(id
);
5239 Func
* func
= Unit::loadFunc(nep
.second
, nep
.first
);
5240 if (func
== nullptr) {
5241 raise_error("Call to undefined function %s()",
5242 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
// Plain function call: no $this and no late-bound class.
5244 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5245 ar
->setThis(nullptr);
5248 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFuncU(PC
& pc
) {
5250 DECODE_IVA(numArgs
);
5252 DECODE(Id
, globalFunc
);
5253 Unit
* unit
= m_fp
->m_func
->unit();
5254 const NamedEntityPair nep
= unit
->lookupNamedEntityPairId(nsFunc
);
5255 Func
* func
= Unit::loadFunc(nep
.second
, nep
.first
);
5256 if (func
== nullptr) {
5257 const NamedEntityPair nep2
= unit
->lookupNamedEntityPairId(globalFunc
);
5258 func
= Unit::loadFunc(nep2
.second
, nep2
.first
);
5259 if (func
== nullptr) {
5260 const char *funcName
= unit
->lookupLitstrId(nsFunc
)->data();
5261 raise_error("Call to undefined function %s()", funcName
);
5264 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5265 ar
->setThis(nullptr);
5268 void VMExecutionContext::fPushObjMethodImpl(
5269 Class
* cls
, StringData
* name
, ObjectData
* obj
, int numArgs
) {
5271 LookupResult res
= lookupObjMethod(f
, cls
, name
, true);
5273 ActRec
* ar
= m_stack
.allocA();
5276 if (res
== LookupResult::MethodFoundNoThis
) {
5280 assert(res
== LookupResult::MethodFoundWithThis
||
5281 res
== LookupResult::MagicCallFound
);
5282 /* Transfer ownership of obj to the ActRec*/
5285 ar
->initNumArgs(numArgs
);
5286 if (res
== LookupResult::MagicCallFound
) {
5287 ar
->setInvName(name
);
5289 ar
->setVarEnv(NULL
);
5294 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushObjMethod(PC
& pc
) {
5296 DECODE_IVA(numArgs
);
5297 Cell
* c1
= m_stack
.topC(); // Method name.
5298 if (!IS_STRING_TYPE(c1
->m_type
)) {
5299 raise_error(Strings::METHOD_NAME_MUST_BE_STRING
);
5301 Cell
* c2
= m_stack
.indC(1); // Object.
5302 if (c2
->m_type
!= KindOfObject
) {
5303 throw_call_non_object(c1
->m_data
.pstr
->data());
5305 ObjectData
* obj
= c2
->m_data
.pobj
;
5306 Class
* cls
= obj
->getVMClass();
5307 StringData
* name
= c1
->m_data
.pstr
;
5308 // We handle decReffing obj and name in fPushObjMethodImpl
5309 m_stack
.ndiscard(2);
5310 fPushObjMethodImpl(cls
, name
, obj
, numArgs
);
5313 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushObjMethodD(PC
& pc
) {
5315 DECODE_IVA(numArgs
);
5316 DECODE_LITSTR(name
);
5317 Cell
* c1
= m_stack
.topC();
5318 if (c1
->m_type
!= KindOfObject
) {
5319 throw_call_non_object(name
->data());
5321 ObjectData
* obj
= c1
->m_data
.pobj
;
5322 Class
* cls
= obj
->getVMClass();
5323 // We handle decReffing obj in fPushObjMethodImpl
5325 fPushObjMethodImpl(cls
, name
, obj
, numArgs
);
5328 template<bool forwarding
>
5329 void VMExecutionContext::pushClsMethodImpl(Class
* cls
,
5334 LookupResult res
= lookupClsMethod(f
, cls
, name
, obj
, getFP(), true);
5335 if (res
== LookupResult::MethodFoundNoThis
||
5336 res
== LookupResult::MagicCallStaticFound
) {
5340 assert(res
== LookupResult::MethodFoundWithThis
||
5341 res
== LookupResult::MagicCallFound
);
5345 ActRec
* ar
= m_stack
.allocA();
5354 /* Propogate the current late bound class if there is one, */
5355 /* otherwise use the class given by this instruction's input */
5356 if (m_fp
->hasThis()) {
5357 cls
= m_fp
->getThis()->getVMClass();
5358 } else if (m_fp
->hasClass()) {
5359 cls
= m_fp
->getClass();
5364 ar
->initNumArgs(numArgs
);
5365 if (res
== LookupResult::MagicCallFound
||
5366 res
== LookupResult::MagicCallStaticFound
) {
5367 ar
->setInvName(name
);
5369 ar
->setVarEnv(nullptr);
5370 decRefStr(const_cast<StringData
*>(name
));
5374 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethod(PC
& pc
) {
5376 DECODE_IVA(numArgs
);
5377 Cell
* c1
= m_stack
.indC(1); // Method name.
5378 if (!IS_STRING_TYPE(c1
->m_type
)) {
5379 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5381 TypedValue
* tv
= m_stack
.top();
5382 assert(tv
->m_type
== KindOfClass
);
5383 Class
* cls
= tv
->m_data
.pcls
;
5384 StringData
* name
= c1
->m_data
.pstr
;
5385 // CLSMETHOD_BODY will take care of decReffing name
5386 m_stack
.ndiscard(2);
5387 assert(cls
&& name
);
5388 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5389 pushClsMethodImpl
<false>(cls
, name
, obj
, numArgs
);
5392 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethodD(PC
& pc
) {
5394 DECODE_IVA(numArgs
);
5395 DECODE_LITSTR(name
);
5396 DECODE(Id
, classId
);
5397 const NamedEntityPair
&nep
=
5398 m_fp
->m_func
->unit()->lookupNamedEntityPairId(classId
);
5399 Class
* cls
= Unit::loadClass(nep
.second
, nep
.first
);
5400 if (cls
== nullptr) {
5401 raise_error(Strings::UNKNOWN_CLASS
, nep
.first
->data());
5403 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5404 pushClsMethodImpl
<false>(cls
, name
, obj
, numArgs
);
5407 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethodF(PC
& pc
) {
5409 DECODE_IVA(numArgs
);
5410 Cell
* c1
= m_stack
.indC(1); // Method name.
5411 if (!IS_STRING_TYPE(c1
->m_type
)) {
5412 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5414 TypedValue
* tv
= m_stack
.top();
5415 assert(tv
->m_type
== KindOfClass
);
5416 Class
* cls
= tv
->m_data
.pcls
;
5418 StringData
* name
= c1
->m_data
.pstr
;
5419 // CLSMETHOD_BODY will take care of decReffing name
5420 m_stack
.ndiscard(2);
5421 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5422 pushClsMethodImpl
<true>(cls
, name
, obj
, numArgs
);
5425 #undef CLSMETHOD_BODY
5427 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCtor(PC
& pc
) {
5429 DECODE_IVA(numArgs
);
5430 TypedValue
* tv
= m_stack
.topTV();
5431 assert(tv
->m_type
== KindOfClass
);
5432 Class
* cls
= tv
->m_data
.pcls
;
5433 assert(cls
!= nullptr);
5436 LookupResult res UNUSED
= lookupCtorMethod(f
, cls
, true);
5437 assert(res
== LookupResult::MethodFoundWithThis
);
5438 // Replace input with uninitialized instance.
5439 ObjectData
* this_
= newInstance(cls
);
5440 TRACE(2, "FPushCtor: just new'ed an instance of class %s: %p\n",
5441 cls
->name()->data(), this_
);
5442 this_
->incRefCount();
5443 this_
->incRefCount();
5444 tv
->m_type
= KindOfObject
;
5445 tv
->m_data
.pobj
= this_
;
5446 // Push new activation record.
5447 ActRec
* ar
= m_stack
.allocA();
5451 ar
->initNumArgs(numArgs
, true /* isFPushCtor */);
5453 ar
->setVarEnv(nullptr);
5456 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCtorD(PC
& pc
) {
5458 DECODE_IVA(numArgs
);
5460 const NamedEntityPair
&nep
=
5461 m_fp
->m_func
->unit()->lookupNamedEntityPairId(id
);
5462 Class
* cls
= Unit::loadClass(nep
.second
, nep
.first
);
5463 if (cls
== nullptr) {
5464 raise_error(Strings::UNKNOWN_CLASS
,
5465 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
5469 LookupResult res UNUSED
= lookupCtorMethod(f
, cls
, true);
5470 assert(res
== LookupResult::MethodFoundWithThis
);
5471 // Push uninitialized instance.
5472 ObjectData
* this_
= newInstance(cls
);
5473 TRACE(2, "FPushCtorD: new'ed an instance of class %s: %p\n",
5474 cls
->name()->data(), this_
);
5475 this_
->incRefCount();
5476 m_stack
.pushObject(this_
);
5477 // Push new activation record.
5478 ActRec
* ar
= m_stack
.allocA();
5482 ar
->initNumArgs(numArgs
, true /* isFPushCtor */);
5483 ar
->setVarEnv(nullptr);
5486 inline void OPTBLD_INLINE
VMExecutionContext::iopDecodeCufIter(PC
& pc
) {
5490 DECODE(Offset
, offset
);
5492 Iter
* it
= frame_iter(m_fp
, itId
);
5493 CufIter
&cit
= it
->cuf();
5495 ObjectData
* obj
= nullptr;
5496 HPHP::Class
* cls
= nullptr;
5497 StringData
* invName
= nullptr;
5498 TypedValue
*func
= m_stack
.topTV();
5501 if (m_fp
->m_func
->isBuiltin()) {
5502 ar
= getOuterVMFrame(ar
);
5504 const Func
* f
= vm_decode_function(tvAsVariant(func
),
5510 pc
= origPc
+ offset
;
5519 cit
.setName(invName
);
5524 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCufIter(PC
& pc
) {
5526 DECODE_IVA(numArgs
);
5529 Iter
* it
= frame_iter(m_fp
, itId
);
5531 auto f
= it
->cuf().func();
5532 auto o
= it
->cuf().ctx();
5533 auto n
= it
->cuf().name();
5535 ActRec
* ar
= m_stack
.allocA();
5538 ar
->m_this
= (ObjectData
*)o
;
5539 if (o
&& !(uintptr_t(o
) & 1)) ar
->m_this
->incRefCount();
5544 ar
->setVarEnv(nullptr);
5546 ar
->initNumArgs(numArgs
, false /* isFPushCtor */);
5549 inline void OPTBLD_INLINE
VMExecutionContext::doFPushCuf(PC
& pc
,
5553 DECODE_IVA(numArgs
);
5555 TypedValue func
= m_stack
.topTV()[safe
];
5557 ObjectData
* obj
= nullptr;
5558 HPHP::Class
* cls
= nullptr;
5559 StringData
* invName
= nullptr;
5561 const Func
* f
= vm_decode_function(tvAsVariant(&func
), getFP(),
5566 if (safe
) m_stack
.topTV()[1] = m_stack
.topTV()[0];
5567 m_stack
.ndiscard(1);
5569 f
= SystemLib::s_nullFunc
;
5571 m_stack
.pushFalse();
5577 ActRec
* ar
= m_stack
.allocA();
5586 ar
->setThis(nullptr);
5588 ar
->initNumArgs(numArgs
, false /* isFPushCtor */);
5590 ar
->setInvName(invName
);
5592 ar
->setVarEnv(nullptr);
5594 tvRefcountedDecRef(&func
);
// iopFPushCuf: call_user_func-style push; delegates to doFPushCuf with
// forward=false, safe=false.
// NOTE(review): the closing brace was elided from this extraction.
5597 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCuf(PC
& pc
) {
5598 doFPushCuf(pc
, false, false);
// iopFPushCufF: forwarding variant of FPushCuf; delegates to doFPushCuf
// with forward=true, safe=false.
// NOTE(review): the closing brace was elided from this extraction.
5601 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCufF(PC
& pc
) {
5602 doFPushCuf(pc
, true, false);
// iopFPushCufSafe: "safe" variant of FPushCuf (pushes a success flag
// instead of fataling on an uncallable target); delegates to doFPushCuf
// with forward=false, safe=true.
// NOTE(review): the closing brace was elided from this extraction.
5605 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCufSafe(PC
& pc
) {
5606 doFPushCuf(pc
, false, true);
// arFromInstr: recovers the ActRec associated with the current F*
// instruction from the stack pointer, using the instruction's encoded
// sp-to-ActRec delta.
// NOTE(review): the closing brace was elided from this extraction.
5609 static inline ActRec
* arFromInstr(TypedValue
* sp
, const Op
* pc
) {
5610 return arFromSpOffset((ActRec
*)sp
, instrSpToArDelta(pc
));
5613 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassC(PC
& pc
) {
5615 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5618 DECODE_IVA(paramId
);
5620 assert(paramId
< ar
->numArgs());
5624 #define FPASSC_CHECKED_PRELUDE \
5625 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc); \
5627 DECODE_IVA(paramId); \
5628 assert(paramId < ar->numArgs()); \
5629 const Func* func = ar->m_func;
5631 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassCW(PC
& pc
) {
5632 FPASSC_CHECKED_PRELUDE
5633 if (func
->mustBeRef(paramId
)) {
5634 TRACE(1, "FPassCW: function %s(%d) param %d is by reference, "
5635 "raising a strict warning (attr:0x%x)\n",
5636 func
->name()->data(), func
->numParams(), paramId
,
5637 func
->info() ? func
->info()->attribute
: 0);
5638 raise_strict_warning("Only variables should be passed by reference");
5642 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassCE(PC
& pc
) {
5643 FPASSC_CHECKED_PRELUDE
5644 if (func
->mustBeRef(paramId
)) {
5645 TRACE(1, "FPassCE: function %s(%d) param %d is by reference, "
5646 "throwing a fatal error (attr:0x%x)\n",
5647 func
->name()->data(), func
->numParams(), paramId
,
5648 func
->info() ? func
->info()->attribute
: 0);
5649 raise_error("Cannot pass parameter %d by reference", paramId
+1);
5653 #undef FPASSC_CHECKED_PRELUDE
5655 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassV(PC
& pc
) {
5656 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5658 DECODE_IVA(paramId
);
5659 assert(paramId
< ar
->numArgs());
5660 const Func
* func
= ar
->m_func
;
5661 if (!func
->byRef(paramId
)) {
5666 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassR(PC
& pc
) {
5667 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5669 DECODE_IVA(paramId
);
5670 assert(paramId
< ar
->numArgs());
5671 const Func
* func
= ar
->m_func
;
5672 if (func
->byRef(paramId
)) {
5673 TypedValue
* tv
= m_stack
.topTV();
5674 if (tv
->m_type
!= KindOfRef
) {
5678 if (m_stack
.topTV()->m_type
== KindOfRef
) {
5684 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassL(PC
& pc
) {
5685 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5687 DECODE_IVA(paramId
);
5689 assert(paramId
< ar
->numArgs());
5690 TypedValue
* fr
= frame_local(m_fp
, local
);
5691 TypedValue
* to
= m_stack
.allocTV();
5692 if (!ar
->m_func
->byRef(paramId
)) {
5693 cgetl_body(m_fp
, fr
, to
, local
);
5699 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassN(PC
& pc
) {
5700 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5703 DECODE_IVA(paramId
);
5704 assert(paramId
< ar
->numArgs());
5705 if (!ar
->m_func
->byRef(paramId
)) {
5712 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassG(PC
& pc
) {
5713 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5716 DECODE_IVA(paramId
);
5717 assert(paramId
< ar
->numArgs());
5718 if (!ar
->m_func
->byRef(paramId
)) {
5725 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassS(PC
& pc
) {
5726 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5729 DECODE_IVA(paramId
);
5730 assert(paramId
< ar
->numArgs());
5731 if (!ar
->m_func
->byRef(paramId
)) {
5738 void VMExecutionContext::iopFPassM(PC
& pc
) {
5739 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5741 DECODE_IVA(paramId
);
5742 assert(paramId
< ar
->numArgs());
5743 if (!ar
->m_func
->byRef(paramId
)) {
5744 DECLARE_GETHELPER_ARGS
5745 getHelper(GETHELPER_ARGS
);
5746 if (tvRet
->m_type
== KindOfRef
) {
5750 DECLARE_SETHELPER_ARGS
5751 TypedValue
* tv1
= m_stack
.allocTV();
5753 if (!setHelperPre
<false, true, false, true, 1,
5754 VectorLeaveCode::ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
5755 if (base
->m_type
!= KindOfRef
) {
5758 varDup(*base
, *tv1
);
5763 setHelperPost
<1>(SETHELPERPOST_ARGS
);
5767 bool VMExecutionContext::doFCall(ActRec
* ar
, PC
& pc
) {
5768 assert(getOuterVMFrame(ar
) == m_fp
);
5770 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
5771 assert(isReturnHelper(ar
->m_savedRip
));
5772 TRACE(3, "FCall: pc %p func %p base %d\n", m_pc
,
5773 m_fp
->m_func
->unit()->entry(),
5774 int(m_fp
->m_func
->base()));
5775 ar
->m_soff
= m_fp
->m_func
->unit()->offsetOf(pc
)
5776 - (uintptr_t)m_fp
->m_func
->base();
5777 assert(pcOff() >= m_fp
->m_func
->base());
5778 prepareFuncEntry(ar
, pc
);
5780 if (EventHook::FunctionEnter(ar
, EventHook::NormalFunc
)) return true;
// iopFCall: executes the call set up by a preceding FPush* sequence.
// Recovers the ActRec from the instruction, validates the argument
// count against it, and checks for stack overflow before entering the
// callee.
// NOTE(review): orig line 5787 (presumably the pc advance), the actual
// doFCall invocation, and the closing brace were elided from this
// extraction.
5785 inline void OPTBLD_INLINE
VMExecutionContext::iopFCall(PC
& pc
) {
5786 ActRec
* ar
= arFromInstr(m_stack
.top(), (Op
*)pc
);
5788 DECODE_IVA(numArgs
);
5789 assert(numArgs
== ar
->numArgs());
5790 checkStack(m_stack
, ar
->m_func
);
5794 // Return a function pointer type for calling a builtin with a given
5795 // return value and args.
5796 template<class Ret
, class... Args
> struct NativeFunction
{
5797 typedef Ret (*type
)(Args
...);
5800 // Recursively pack all parameters up to call a native builtin.
5801 template<class Ret
, size_t NArgs
, size_t CurArg
> struct NativeFuncCaller
;
5802 template<class Ret
, size_t NArgs
, size_t CurArg
> struct NativeFuncCaller
{
5803 template<class... Args
>
5804 static Ret
call(const Func
* func
, TypedValue
* tvs
, Args
... args
) {
5805 typedef NativeFuncCaller
<Ret
,NArgs
- 1,CurArg
+ 1> NextArgT
;
5806 DataType type
= func
->params()[CurArg
].builtinType();
5807 if (type
== KindOfDouble
) {
5808 // pass TV.m_data.dbl by value with C++ calling convention for doubles
5809 return NextArgT::call(func
, tvs
- 1, args
..., tvs
->m_data
.dbl
);
5811 if (type
== KindOfInt64
|| type
== KindOfBoolean
) {
5812 // pass TV.m_data.num by value
5813 return NextArgT::call(func
, tvs
- 1, args
..., tvs
->m_data
.num
);
5815 if (IS_STRING_TYPE(type
) || type
== KindOfArray
|| type
== KindOfObject
) {
5816 // pass ptr to TV.m_data for String&, Array&, or Object&
5817 return NextArgT::call(func
, tvs
- 1, args
..., &tvs
->m_data
);
5819 // final case is for passing full value as Variant&
5820 return NextArgT::call(func
, tvs
- 1, args
..., tvs
);
5823 template<class Ret
, size_t CurArg
> struct NativeFuncCaller
<Ret
,0,CurArg
> {
5824 template<class... Args
>
5825 static Ret
call(const Func
* f
, TypedValue
*, Args
... args
) {
5826 typedef typename NativeFunction
<Ret
,Args
...>::type FuncType
;
5827 return reinterpret_cast<FuncType
>(f
->nativeFuncPtr())(args
...);
5832 static Ret
makeNativeCall(const Func
* f
, TypedValue
* args
, size_t numArgs
) {
5833 static_assert(kMaxBuiltinArgs
== 5,
5834 "makeNativeCall needs updates for kMaxBuiltinArgs");
5836 case 0: return NativeFuncCaller
<Ret
,0,0>::call(f
, args
);
5837 case 1: return NativeFuncCaller
<Ret
,1,0>::call(f
, args
);
5838 case 2: return NativeFuncCaller
<Ret
,2,0>::call(f
, args
);
5839 case 3: return NativeFuncCaller
<Ret
,3,0>::call(f
, args
);
5840 case 4: return NativeFuncCaller
<Ret
,4,0>::call(f
, args
);
5841 case 5: return NativeFuncCaller
<Ret
,5,0>::call(f
, args
);
5842 default: assert(false);
5848 static int makeNativeRefCall(const Func
* f
, Ret
* ret
,
5849 TypedValue
* args
, size_t numArgs
) {
5851 case 0: return NativeFuncCaller
<int64_t,0,0>::call(f
, args
, ret
);
5852 case 1: return NativeFuncCaller
<int64_t,1,0>::call(f
, args
, ret
);
5853 case 2: return NativeFuncCaller
<int64_t,2,0>::call(f
, args
, ret
);
5854 case 3: return NativeFuncCaller
<int64_t,3,0>::call(f
, args
, ret
);
5855 case 4: return NativeFuncCaller
<int64_t,4,0>::call(f
, args
, ret
);
5856 case 5: return NativeFuncCaller
<int64_t,5,0>::call(f
, args
, ret
);
5857 default: assert(false);
5862 inline void OPTBLD_INLINE
VMExecutionContext::iopFCallBuiltin(PC
& pc
) {
5864 DECODE_IVA(numArgs
);
5865 DECODE_IVA(numNonDefault
);
5867 const NamedEntity
* ne
= m_fp
->m_func
->unit()->lookupNamedEntityId(id
);
5868 Func
* func
= Unit::lookupFunc(ne
);
5869 if (func
== nullptr) {
5870 raise_error("Undefined function: %s",
5871 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
5873 TypedValue
* args
= m_stack
.indTV(numArgs
-1);
5874 assert(numArgs
== func
->numParams());
5875 bool zendParamMode
= func
->info()->attribute
& ClassInfo::ZendParamMode
;
5878 for (int i
= 0; i
< numNonDefault
; i
++) {
5879 const Func::ParamInfo
& pi
= func
->params()[i
];
5881 if (zendParamMode
) {
5882 #define CASE(kind) case KindOf ## kind : do { \
5883 if (!tvCoerceParamTo ## kind ## InPlace(&args[-i])) { \
5884 ret.m_type = KindOfNull; \
5889 switch (pi
.builtinType()) {
5905 #define CASE(kind) case KindOf ## kind : do { \
5906 tvCastTo ## kind ## InPlace(&args[-i]); break; \
5908 switch (pi
.builtinType()) {
5925 ret
.m_type
= func
->returnType();
5926 switch (func
->returnType()) {
5928 ret
.m_data
.num
= makeNativeCall
<bool>(func
, args
, numArgs
);
5930 case KindOfNull
: /* void return type */
5932 ret
.m_data
.num
= makeNativeCall
<int64_t>(func
, args
, numArgs
);
5935 case KindOfStaticString
:
5938 makeNativeRefCall(func
, &ret
.m_data
, args
, numArgs
);
5939 if (ret
.m_data
.num
== 0) {
5940 ret
.m_type
= KindOfNull
;
5944 makeNativeRefCall(func
, &ret
, args
, numArgs
);
5945 if (ret
.m_type
== KindOfUninit
) {
5946 ret
.m_type
= KindOfNull
;
5954 frame_free_args(args
, numNonDefault
);
5955 m_stack
.ndiscard(numArgs
);
5956 memcpy(m_stack
.allocTV(), &ret
, sizeof(TypedValue
));
5959 bool VMExecutionContext::prepareArrayArgs(ActRec
* ar
,
5961 if (UNLIKELY(ar
->hasInvName())) {
5962 m_stack
.pushStringNoRc(ar
->getInvName());
5963 m_stack
.pushArray(args
);
5969 int nargs
= args
->size();
5970 const Func
* f
= ar
->m_func
;
5971 int nparams
= f
->numParams();
5972 int extra
= nargs
- nparams
;
5977 ssize_t pos
= args
->iter_begin();
5978 for (int i
= 0; i
< nparams
; ++i
) {
5979 TypedValue
* from
= const_cast<TypedValue
*>(
5980 args
->getValueRef(pos
).asTypedValue());
5981 TypedValue
* to
= m_stack
.allocTV();
5982 if (UNLIKELY(f
->byRef(i
))) {
5983 if (UNLIKELY(!tvAsVariant(from
).isReferenced())) {
5984 raise_warning("Parameter %d to %s() expected to be a reference, "
5985 "value given", i
+ 1, f
->fullName()->data());
5986 if (skipCufOnInvalidParams
) {
5988 while (i
--) m_stack
.popTV();
5997 if (UNLIKELY(to
->m_type
== KindOfRef
)) {
6001 pos
= args
->iter_advance(pos
);
6003 if (extra
&& (ar
->m_func
->attrs() & AttrMayUseVV
)) {
6004 ExtraArgs
* extraArgs
= ExtraArgs::allocateUninit(extra
);
6005 for (int i
= 0; i
< extra
; ++i
) {
6006 TypedValue
* to
= extraArgs
->getExtraArg(i
);
6007 tvDup(*args
->getValueRef(pos
).asTypedValue(), *to
);
6008 if (to
->m_type
== KindOfRef
&& to
->m_data
.pref
->_count
== 2) {
6011 pos
= args
->iter_advance(pos
);
6013 ar
->setExtraArgs(extraArgs
);
6014 ar
->initNumArgs(nargs
);
6016 ar
->initNumArgs(nparams
);
6022 static void cleanupParamsAndActRec(Stack
& stack
,
6024 ExtraArgs
* extraArgs
) {
6025 assert(stack
.top() + (extraArgs
?
6026 ar
->m_func
->numParams() :
6027 ar
->numArgs()) == (void*)ar
);
6029 const int numExtra
= ar
->numArgs() - ar
->m_func
->numParams();
6030 ExtraArgs::deallocate(extraArgs
, numExtra
);
6032 while (stack
.top() != (void*)ar
) {
6038 bool VMExecutionContext::doFCallArray(PC
& pc
) {
6039 ActRec
* ar
= (ActRec
*)(m_stack
.top() + 1);
6040 assert(ar
->numArgs() == 1);
6042 Cell
* c1
= m_stack
.topC();
6043 if (skipCufOnInvalidParams
&& UNLIKELY(c1
->m_type
!= KindOfArray
)) {
6045 // this is what we /should/ do, but our code base depends
6046 // on the broken behavior of casting the second arg to an
6048 cleanupParamsAndActRec(m_stack
, ar
, nullptr);
6050 raise_warning("call_user_func_array() expects parameter 2 to be array");
6054 const Func
* func
= ar
->m_func
;
6056 Array
args(LIKELY(c1
->m_type
== KindOfArray
) ? c1
->m_data
.parr
:
6057 tvAsVariant(c1
).toArray().get());
6059 checkStack(m_stack
, func
);
6061 assert(ar
->m_savedRbp
== (uint64_t)m_fp
);
6062 assert(!ar
->m_func
->isGenerator());
6064 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
6065 assert(isReturnHelper(ar
->m_savedRip
));
6066 TRACE(3, "FCallArray: pc %p func %p base %d\n", m_pc
,
6067 m_fp
->m_func
->unit()->entry(),
6068 int(m_fp
->m_func
->base()));
6069 ar
->m_soff
= m_fp
->m_func
->unit()->offsetOf(pc
)
6070 - (uintptr_t)m_fp
->m_func
->base();
6071 assert(pcOff() > m_fp
->m_func
->base());
6073 if (UNLIKELY(!prepareArrayArgs(ar
, args
.get()))) return false;
6076 if (UNLIKELY(!(prepareFuncEntry(ar
, pc
)))) {
6080 if (UNLIKELY(!EventHook::FunctionEnter(ar
, EventHook::NormalFunc
))) {
// iopFCallArray: thin wrapper over doFCallArray; the boolean result is
// deliberately discarded (failure paths are handled inside).
// NOTE(review): the closing brace was elided from this extraction.
6087 inline void OPTBLD_INLINE
VMExecutionContext::iopFCallArray(PC
& pc
) {
6089 (void)doFCallArray(pc
);
6092 inline void OPTBLD_INLINE
VMExecutionContext::iopCufSafeArray(PC
& pc
) {
6095 ret
.append(tvAsVariant(m_stack
.top() + 1));
6096 ret
.appendWithRef(tvAsVariant(m_stack
.top() + 0));
6099 tvAsVariant(m_stack
.top()) = ret
;
6102 inline void OPTBLD_INLINE
VMExecutionContext::iopCufSafeReturn(PC
& pc
) {
6104 bool ok
= tvAsVariant(m_stack
.top() + 1).toBoolean();
6105 tvRefcountedDecRef(m_stack
.top() + 1);
6106 tvRefcountedDecRef(m_stack
.top() + (ok
? 2 : 0));
6107 if (ok
) m_stack
.top()[2] = m_stack
.top()[0];
6108 m_stack
.ndiscard(2);
// initIterator: shared helper for the IterInit* handlers. Initializes
// the iterator from the stack-top cell; the (elided) remainder
// presumably pops the cell and branches pc to `offset` when there are
// no elements -- confirm against the canonical source.
// NOTE(review): everything after the it->init(c1) call (orig 6114+)
// was elided from this extraction, including the return.
6111 inline bool VMExecutionContext::initIterator(PC
& pc
, PC
& origPc
, Iter
* it
,
6112 Offset offset
, Cell
* c1
) {
6113 bool hasElems
= it
->init(c1
);
// iopIterInit: initializes a (value-only) foreach iterator over the
// stack-top cell; when the iterable is non-empty, copies the first
// value into the `val` frame local.
// NOTE(review): the decodes of `itId` and `val` (orig 6123-6124,
// 6126), the origPc capture, and closing braces were elided from this
// extraction.
6121 inline void OPTBLD_INLINE
VMExecutionContext::iopIterInit(PC
& pc
) {
6125 DECODE(Offset
, offset
);
6127 Cell
* c1
= m_stack
.topC();
6128 Iter
* it
= frame_iter(m_fp
, itId
);
6129 TypedValue
* tv1
= frame_local(m_fp
, val
);
6130 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6131 tvAsVariant(tv1
) = it
->arr().second();
6135 inline void OPTBLD_INLINE
VMExecutionContext::iopIterInitK(PC
& pc
) {
6139 DECODE(Offset
, offset
);
6142 Cell
* c1
= m_stack
.topC();
6143 Iter
* it
= frame_iter(m_fp
, itId
);
6144 TypedValue
* tv1
= frame_local(m_fp
, val
);
6145 TypedValue
* tv2
= frame_local(m_fp
, key
);
6146 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6147 tvAsVariant(tv1
) = it
->arr().second();
6148 tvAsVariant(tv2
) = it
->arr().first();
6152 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterInit(PC
& pc
) {
6156 DECODE(Offset
, offset
);
6158 Cell
* c1
= m_stack
.topC();
6159 Iter
* it
= frame_iter(m_fp
, itId
);
6160 TypedValue
* tv1
= frame_local(m_fp
, val
);
6161 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6162 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6166 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterInitK(PC
& pc
) {
6170 DECODE(Offset
, offset
);
6173 Cell
* c1
= m_stack
.topC();
6174 Iter
* it
= frame_iter(m_fp
, itId
);
6175 TypedValue
* tv1
= frame_local(m_fp
, val
);
6176 TypedValue
* tv2
= frame_local(m_fp
, key
);
6177 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6178 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6179 tvAsVariant(tv2
) = it
->arr().first();
6184 inline bool VMExecutionContext::initIteratorM(PC
& pc
, PC
& origPc
, Iter
* it
,
6185 Offset offset
, Var
* v1
,
6188 bool hasElems
= false;
6189 TypedValue
* rtv
= v1
->m_data
.pref
->tv();
6190 if (rtv
->m_type
== KindOfArray
) {
6191 hasElems
= new_miter_array_key(it
, v1
->m_data
.pref
, val
, key
);
6192 } else if (rtv
->m_type
== KindOfObject
) {
6193 Class
* ctx
= arGetContextClass(g_vmContext
->getFP());
6194 hasElems
= new_miter_object(it
, v1
->m_data
.pref
, ctx
, val
, key
);
6196 hasElems
= new_miter_other(it
, v1
->m_data
.pref
);
6207 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterInit(PC
& pc
) {
6211 DECODE(Offset
, offset
);
6213 Var
* v1
= m_stack
.topV();
6214 assert(v1
->m_type
== KindOfRef
);
6215 Iter
* it
= frame_iter(m_fp
, itId
);
6216 TypedValue
* tv1
= frame_local(m_fp
, val
);
6217 initIteratorM(pc
, origPc
, it
, offset
, v1
, tv1
, nullptr);
6220 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterInitK(PC
& pc
) {
6224 DECODE(Offset
, offset
);
6227 Var
* v1
= m_stack
.topV();
6228 assert(v1
->m_type
== KindOfRef
);
6229 Iter
* it
= frame_iter(m_fp
, itId
);
6230 TypedValue
* tv1
= frame_local(m_fp
, val
);
6231 TypedValue
* tv2
= frame_local(m_fp
, key
);
6232 initIteratorM(pc
, origPc
, it
, offset
, v1
, tv1
, tv2
);
6235 inline void OPTBLD_INLINE
VMExecutionContext::iopIterNext(PC
& pc
) {
6239 DECODE(Offset
, offset
);
6241 Iter
* it
= frame_iter(m_fp
, itId
);
6242 TypedValue
* tv1
= frame_local(m_fp
, val
);
6245 tvAsVariant(tv1
) = it
->arr().second();
6249 inline void OPTBLD_INLINE
VMExecutionContext::iopIterNextK(PC
& pc
) {
6253 DECODE(Offset
, offset
);
6256 Iter
* it
= frame_iter(m_fp
, itId
);
6257 TypedValue
* tv1
= frame_local(m_fp
, val
);
6258 TypedValue
* tv2
= frame_local(m_fp
, key
);
6261 tvAsVariant(tv1
) = it
->arr().second();
6262 tvAsVariant(tv2
) = it
->arr().first();
6266 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterNext(PC
& pc
) {
6270 DECODE(Offset
, offset
);
6272 Iter
* it
= frame_iter(m_fp
, itId
);
6273 TypedValue
* tv1
= frame_local(m_fp
, val
);
6276 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6280 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterNextK(PC
& pc
) {
6284 DECODE(Offset
, offset
);
6287 Iter
* it
= frame_iter(m_fp
, itId
);
6288 TypedValue
* tv1
= frame_local(m_fp
, val
);
6289 TypedValue
* tv2
= frame_local(m_fp
, key
);
6292 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6293 tvAsVariant(tv2
) = it
->arr().first();
6297 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterNext(PC
& pc
) {
6301 DECODE(Offset
, offset
);
6303 Iter
* it
= frame_iter(m_fp
, itId
);
6304 TypedValue
* tv1
= frame_local(m_fp
, val
);
6305 if (miter_next_key(it
, tv1
, nullptr)) {
6310 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterNextK(PC
& pc
) {
6314 DECODE(Offset
, offset
);
6317 Iter
* it
= frame_iter(m_fp
, itId
);
6318 TypedValue
* tv1
= frame_local(m_fp
, val
);
6319 TypedValue
* tv2
= frame_local(m_fp
, key
);
6320 if (miter_next_key(it
, tv1
, tv2
)) {
6325 inline void OPTBLD_INLINE
VMExecutionContext::iopIterFree(PC
& pc
) {
6328 Iter
* it
= frame_iter(m_fp
, itId
);
6332 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterFree(PC
& pc
) {
6335 Iter
* it
= frame_iter(m_fp
, itId
);
6339 inline void OPTBLD_INLINE
VMExecutionContext::iopCIterFree(PC
& pc
) {
6342 Iter
* it
= frame_iter(m_fp
, itId
);
6346 inline void OPTBLD_INLINE
inclOp(VMExecutionContext
*ec
, PC
&pc
,
6347 InclOpFlags flags
) {
6349 Cell
* c1
= ec
->m_stack
.topC();
6350 String
path(prepareKey(c1
));
6352 TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
6353 flags
& InclOpOnce
? "Once" : "",
6354 flags
& InclOpDocRoot
? "DocRoot" : "",
6355 flags
& InclOpRelative
? "Relative" : "",
6356 flags
& InclOpFatal
? "Fatal" : "",
6359 Unit
* u
= flags
& (InclOpDocRoot
|InclOpRelative
) ?
6360 ec
->evalIncludeRoot(path
.get(), flags
, &initial
) :
6361 ec
->evalInclude(path
.get(), ec
->m_fp
->m_func
->unit()->filepath(), &initial
);
6364 ((flags
& InclOpFatal
) ?
6365 (void (*)(const char *, ...))raise_error
:
6366 (void (*)(const char *, ...))raise_warning
)("File not found: %s",
6368 ec
->m_stack
.pushFalse();
6370 if (!(flags
& InclOpOnce
) || initial
) {
6371 ec
->evalUnit(u
, pc
, EventHook::PseudoMain
);
6373 Stats::inc(Stats::PseudoMain_Guarded
);
6374 ec
->m_stack
.pushTrue();
6379 inline void OPTBLD_INLINE
VMExecutionContext::iopIncl(PC
& pc
) {
6380 inclOp(this, pc
, InclOpDefault
);
6383 inline void OPTBLD_INLINE
VMExecutionContext::iopInclOnce(PC
& pc
) {
6384 inclOp(this, pc
, InclOpOnce
);
6387 inline void OPTBLD_INLINE
VMExecutionContext::iopReq(PC
& pc
) {
6388 inclOp(this, pc
, InclOpFatal
);
6391 inline void OPTBLD_INLINE
VMExecutionContext::iopReqOnce(PC
& pc
) {
6392 inclOp(this, pc
, InclOpFatal
| InclOpOnce
);
6395 inline void OPTBLD_INLINE
VMExecutionContext::iopReqDoc(PC
& pc
) {
6396 inclOp(this, pc
, InclOpFatal
| InclOpOnce
| InclOpDocRoot
);
6399 inline void OPTBLD_INLINE
VMExecutionContext::iopEval(PC
& pc
) {
6401 Cell
* c1
= m_stack
.topC();
6402 String
code(prepareKey(c1
));
6403 String prefixedCode
= concat("<?php ", code
);
6404 Unit
* unit
= compileEvalString(prefixedCode
.get());
6405 if (unit
== nullptr) {
6406 raise_error("Syntax error in eval()");
6409 evalUnit(unit
, pc
, EventHook::Eval
);
6412 inline void OPTBLD_INLINE
VMExecutionContext::iopDefFunc(PC
& pc
) {
6415 Func
* f
= m_fp
->m_func
->unit()->lookupFuncId(fid
);
6419 inline void OPTBLD_INLINE
VMExecutionContext::iopDefCls(PC
& pc
) {
6422 PreClass
* c
= m_fp
->m_func
->unit()->lookupPreClassId(cid
);
6426 inline void OPTBLD_INLINE
VMExecutionContext::iopDefTypedef(PC
& pc
) {
6429 m_fp
->m_func
->unit()->defTypedef(tid
);
6432 static inline void checkThis(ActRec
* fp
) {
6433 if (!fp
->hasThis()) {
6434 raise_error(Strings::FATAL_NULL_THIS
);
6438 inline void OPTBLD_INLINE
VMExecutionContext::iopThis(PC
& pc
) {
6441 ObjectData
* this_
= m_fp
->getThis();
6442 m_stack
.pushObject(this_
);
6445 inline void OPTBLD_INLINE
VMExecutionContext::iopBareThis(PC
& pc
) {
6447 DECODE(unsigned char, notice
);
6448 if (m_fp
->hasThis()) {
6449 ObjectData
* this_
= m_fp
->getThis();
6450 m_stack
.pushObject(this_
);
6453 if (notice
) raise_notice(Strings::WARN_NULL_THIS
);
6457 inline void OPTBLD_INLINE
VMExecutionContext::iopCheckThis(PC
& pc
) {
6462 inline void OPTBLD_INLINE
VMExecutionContext::iopInitThisLoc(PC
& pc
) {
6465 TypedValue
* thisLoc
= frame_local(m_fp
, id
);
6466 tvRefcountedDecRef(thisLoc
);
6467 if (m_fp
->hasThis()) {
6468 thisLoc
->m_data
.pobj
= m_fp
->getThis();
6469 thisLoc
->m_type
= KindOfObject
;
6472 tvWriteUninit(thisLoc
);
6477 * Helper for StaticLoc and StaticLocInit.
6480 lookupStatic(StringData
* name
,
6482 TypedValue
*&val
, bool& inited
) {
6483 HphpArray
* map
= get_static_locals(fp
);
6484 assert(map
!= nullptr);
6485 val
= map
->nvGet(name
);
6486 if (val
== nullptr) {
6489 map
->set(name
, tvAsCVarRef(&tv
), false);
6490 val
= map
->nvGet(name
);
6497 inline void OPTBLD_INLINE
VMExecutionContext::iopStaticLoc(PC
& pc
) {
6499 DECODE_IVA(localId
);
6501 TypedValue
* fr
= nullptr;
6503 lookupStatic(var
, m_fp
, fr
, inited
);
6504 assert(fr
!= nullptr);
6505 if (fr
->m_type
!= KindOfRef
) {
6509 TypedValue
* tvLocal
= frame_local(m_fp
, localId
);
6510 tvBind(fr
, tvLocal
);
6514 m_stack
.pushFalse();
6518 inline void OPTBLD_INLINE
VMExecutionContext::iopStaticLocInit(PC
& pc
) {
6520 DECODE_IVA(localId
);
6522 TypedValue
* fr
= nullptr;
6524 lookupStatic(var
, m_fp
, fr
, inited
);
6525 assert(fr
!= nullptr);
6527 Cell
* initVal
= m_stack
.topC();
6528 tvDup(*initVal
, *fr
);
6530 if (fr
->m_type
!= KindOfRef
) {
6534 TypedValue
* tvLocal
= frame_local(m_fp
, localId
);
6535 tvBind(fr
, tvLocal
);
6539 inline void OPTBLD_INLINE
VMExecutionContext::iopCatch(PC
& pc
) {
6541 assert(m_faults
.size() > 0);
6542 Fault fault
= m_faults
.back();
6543 m_faults
.pop_back();
6544 assert(fault
.m_faultType
== Fault::Type::UserException
);
6545 m_stack
.pushObjectNoRc(fault
.m_userException
);
6548 inline void OPTBLD_INLINE
VMExecutionContext::iopLateBoundCls(PC
& pc
) {
6550 Class
* cls
= frameStaticClass(m_fp
);
6552 raise_error(HPHP::Strings::CANT_ACCESS_STATIC
);
6554 m_stack
.pushClass(cls
);
6557 inline void OPTBLD_INLINE
VMExecutionContext::iopVerifyParamType(PC
& pc
) {
6558 SYNC(); // We might need m_pc to be updated to throw.
6562 const Func
*func
= m_fp
->m_func
;
6563 assert(param
< func
->numParams());
6564 assert(func
->numParams() == int(func
->params().size()));
6565 const TypeConstraint
& tc
= func
->params()[param
].typeConstraint();
6566 assert(tc
.hasConstraint() || !RuntimeOption::EvalCheckExtendedTypeHints
);
6567 const TypedValue
*tv
= frame_local(m_fp
, param
);
6568 tc
.verify(tv
, func
, param
);
6571 inline void OPTBLD_INLINE
VMExecutionContext::iopNativeImpl(PC
& pc
) {
6573 uint soff
= m_fp
->m_soff
;
6574 BuiltinFunction func
= m_fp
->m_func
->builtinFuncPtr();
6576 // Actually call the native implementation. This will handle freeing the
6577 // locals in the normal case. In the case of an exception, the VM unwinder
6578 // will take care of it.
6580 // Adjust the stack; the native implementation put the return value in the
6581 // right place for us already
6582 m_stack
.ndiscard(m_fp
->m_func
->numSlotsInFrame());
6583 ActRec
* sfp
= m_fp
->arGetSfp();
6584 if (LIKELY(sfp
!= m_fp
)) {
6585 // Restore caller's execution state.
6587 pc
= m_fp
->m_func
->unit()->entry() + m_fp
->m_func
->base() + soff
;
6590 // No caller; terminate.
6594 std::ostringstream os
;
6595 os
<< toStringElm(m_stack
.topTV());
6597 Trace::trace("Return %s from VMExecutionContext::dispatch("
6598 "%p)\n", os
.str().c_str(), m_fp
));
6605 inline void OPTBLD_INLINE
VMExecutionContext::iopHighInvalid(PC
& pc
) {
6606 fprintf(stderr
, "invalid bytecode executed\n");
6610 inline void OPTBLD_INLINE
VMExecutionContext::iopSelf(PC
& pc
) {
6612 Class
* clss
= arGetContextClass(m_fp
);
6614 raise_error(HPHP::Strings::CANT_ACCESS_SELF
);
6616 m_stack
.pushClass(clss
);
6619 inline void OPTBLD_INLINE
VMExecutionContext::iopParent(PC
& pc
) {
6621 Class
* clss
= arGetContextClass(m_fp
);
6623 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS
);
6625 Class
* parent
= clss
->parent();
6627 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT
);
6629 m_stack
.pushClass(parent
);
6632 inline void OPTBLD_INLINE
VMExecutionContext::iopCreateCl(PC
& pc
) {
6634 DECODE_IVA(numArgs
);
6635 DECODE_LITSTR(clsName
);
6636 Class
* cls
= Unit::loadClass(clsName
);
6637 c_Closure
* cl
= static_cast<c_Closure
*>(newInstance(cls
));
6638 c_Closure
* cl2
= cl
->init(numArgs
, m_fp
, m_stack
.top());
6639 m_stack
.ndiscard(numArgs
);
6641 m_stack
.pushObject(cl2
);
6644 static inline c_Continuation
* createCont(const Func
* origFunc
,
6645 const Func
* genFunc
) {
6646 auto const cont
= c_Continuation::alloc(origFunc
, genFunc
);
6647 cont
->incRefCount();
6648 cont
->setNoDestruct();
6650 // The ActRec corresponding to the generator body lives as long as the object
6651 // does. We set it up once, here, and then just change FP to point to it when
6652 // we enter the generator body.
6653 ActRec
* ar
= cont
->actRec();
6654 ar
->m_func
= genFunc
;
6656 ar
->setVarEnv(nullptr);
6658 TypedValue
* contLocal
= frame_local(ar
, 0);
6659 contLocal
->m_type
= KindOfObject
;
6660 contLocal
->m_data
.pobj
= cont
;
6661 // Do not incref the continuation here! Doing so will create a reference
6662 // cycle, since this reference is a local in the continuation frame and thus
6663 // will be decreffed when the continuation is destroyed. The corresponding
6664 // non-decref is in ~c_Continuation.
6670 VMExecutionContext::createContFunc(const Func
* origFunc
,
6671 const Func
* genFunc
) {
6672 auto cont
= createCont(origFunc
, genFunc
);
6673 cont
->actRec()->setThis(nullptr);
6678 VMExecutionContext::createContMeth(const Func
* origFunc
,
6679 const Func
* genFunc
,
6681 if (origFunc
->isClosureBody()) {
6682 genFunc
= genFunc
->cloneAndSetClass(origFunc
->cls());
6685 auto cont
= createCont(origFunc
, genFunc
);
6686 auto ar
= cont
->actRec();
6687 ar
->setThisOrClass(objOrCls
);
6688 if (ar
->hasThis()) {
6689 ar
->getThis()->incRefCount();
6694 static inline void setContVar(const Func
* genFunc
,
6695 const StringData
* name
,
6697 c_Continuation
* cont
) {
6698 Id destId
= genFunc
->lookupVarId(name
);
6699 if (destId
!= kInvalidId
) {
6700 // Copy the value of the local to the cont object and set the
6701 // local to uninit so that we don't need to change refcounts.
6702 tvCopy(*src
, *frame_local(cont
->actRec(), destId
));
6705 ActRec
*contFP
= cont
->actRec();
6706 if (!contFP
->hasVarEnv()) {
6707 // We pass skipInsert to this VarEnv because it's going to exist
6708 // independent of the chain; i.e. we can't stack-allocate it. We link it
6709 // into the chain in UnpackCont, and take it out in ContSuspend.
6710 contFP
->setVarEnv(VarEnv::createLocalOnHeap(contFP
));
6712 contFP
->getVarEnv()->setWithRef(name
, src
);
6716 static const StaticString
s_this("this");
6719 VMExecutionContext::fillContinuationVars(ActRec
* fp
,
6720 const Func
* origFunc
,
6721 const Func
* genFunc
,
6722 c_Continuation
* cont
) {
6723 // For functions that contain only named locals, the variable
6724 // environment is saved and restored by teleporting the values (and
6725 // their references) between the evaluation stack and the local
6726 // space at the end of the object using memcpy. Any variables in a
6727 // VarEnv are saved and restored from m_vars as usual.
6728 static const StringData
* thisStr
= s_this
.get();
6730 if (fp
->hasVarEnv()) {
6731 Stats::inc(Stats::Cont_CreateVerySlow
);
6732 Array definedVariables
= fp
->getVarEnv()->getDefinedVariables();
6733 skipThis
= definedVariables
.exists(s_this
, true);
6735 for (ArrayIter
iter(definedVariables
); !iter
.end(); iter
.next()) {
6736 setContVar(genFunc
, iter
.first().getStringData(),
6737 const_cast<TypedValue
*>(iter
.secondRef().asTypedValue()), cont
);
6740 skipThis
= origFunc
->lookupVarId(thisStr
) != kInvalidId
;
6741 for (Id i
= 0; i
< origFunc
->numNamedLocals(); ++i
) {
6742 setContVar(genFunc
, origFunc
->localVarName(i
),
6743 frame_local(fp
, i
), cont
);
6747 // If $this is used as a local inside the body and is not provided
6748 // by our containing environment, just prefill it here instead of
6749 // using InitThisLoc inside the body
6750 if (!skipThis
&& fp
->hasThis()) {
6751 Id id
= genFunc
->lookupVarId(thisStr
);
6752 if (id
!= kInvalidId
) {
6753 tvAsVariant(frame_local(cont
->actRec(), id
)) = fp
->getThis();
6759 inline void OPTBLD_INLINE
VMExecutionContext::iopCreateCont(PC
& pc
) {
6761 DECODE_LITSTR(genName
);
6763 const Func
* origFunc
= m_fp
->m_func
;
6764 const Func
* genFunc
= origFunc
->getGeneratorBody(genName
);
6765 assert(genFunc
!= nullptr);
6767 c_Continuation
* cont
= origFunc
->isMethod()
6768 ? createContMeth(origFunc
, genFunc
, m_fp
->getThisOrClass())
6769 : createContFunc(origFunc
, genFunc
);
6771 fillContinuationVars(m_fp
, origFunc
, genFunc
, cont
);
6773 TypedValue
* ret
= m_stack
.allocTV();
6774 ret
->m_type
= KindOfObject
;
6775 ret
->m_data
.pobj
= cont
;
6778 static inline c_Continuation
* frame_continuation(ActRec
* fp
) {
6779 ObjectData
* obj
= frame_local(fp
, 0)->m_data
.pobj
;
6780 assert(dynamic_cast<c_Continuation
*>(obj
));
6781 return static_cast<c_Continuation
*>(obj
);
6784 static inline c_Continuation
* this_continuation(ActRec
* fp
) {
6785 ObjectData
* obj
= fp
->getThis();
6786 assert(dynamic_cast<c_Continuation
*>(obj
));
6787 return static_cast<c_Continuation
*>(obj
);
6790 void VMExecutionContext::iopContEnter(PC
& pc
) {
6793 // The stack must have one cell! Or else generatorStackBase() won't work!
6794 assert(m_stack
.top() + 1 ==
6795 (TypedValue
*)m_fp
- m_fp
->m_func
->numSlotsInFrame());
6797 // Do linkage of the continuation's AR.
6798 assert(m_fp
->hasThis());
6799 c_Continuation
* cont
= this_continuation(m_fp
);
6800 ActRec
* contAR
= cont
->actRec();
6801 arSetSfp(contAR
, m_fp
);
6803 contAR
->m_soff
= m_fp
->m_func
->unit()->offsetOf(pc
)
6804 - (uintptr_t)m_fp
->m_func
->base();
6805 contAR
->m_savedRip
=
6806 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedGeneratorFrame());
6807 assert(isReturnHelper(contAR
->m_savedRip
));
6810 pc
= contAR
->m_func
->getEntry();
6813 if (UNLIKELY(!EventHook::FunctionEnter(contAR
, EventHook::NormalFunc
))) {
6818 inline void OPTBLD_INLINE
VMExecutionContext::iopUnpackCont(PC
& pc
) {
6820 c_Continuation
* cont
= frame_continuation(m_fp
);
6822 // check sanity of received value
6823 assert(tvIsPlausible(m_stack
.topC()));
6825 // Return the label in a stack cell
6826 TypedValue
* label
= m_stack
.allocTV();
6827 label
->m_type
= KindOfInt64
;
6828 label
->m_data
.num
= cont
->m_label
;
6831 inline void OPTBLD_INLINE
VMExecutionContext::iopContSuspend(PC
& pc
) {
6834 c_Continuation
* cont
= frame_continuation(m_fp
);
6836 cont
->c_Continuation::t_update(label
, tvAsCVarRef(m_stack
.topTV()));
6839 EventHook::FunctionExit(m_fp
);
6840 ActRec
* prevFp
= m_fp
->arGetSfp();
6841 pc
= prevFp
->m_func
->getEntry() + m_fp
->m_soff
;
6845 inline void OPTBLD_INLINE
VMExecutionContext::iopContSuspendK(PC
& pc
) {
6848 c_Continuation
* cont
= frame_continuation(m_fp
);
6850 TypedValue
* val
= m_stack
.topTV();
6852 cont
->c_Continuation::t_update_key(label
, tvAsCVarRef(m_stack
.topTV()),
6856 EventHook::FunctionExit(m_fp
);
6857 ActRec
* prevFp
= m_fp
->arGetSfp();
6858 pc
= prevFp
->m_func
->getEntry() + m_fp
->m_soff
;
6862 inline void OPTBLD_INLINE
VMExecutionContext::iopContRetC(PC
& pc
) {
6864 c_Continuation
* cont
= frame_continuation(m_fp
);
6866 tvSetIgnoreRef(*m_stack
.topC(), *cont
->m_value
.asTypedValue());
6869 EventHook::FunctionExit(m_fp
);
6870 ActRec
* prevFp
= m_fp
->arGetSfp();
6871 pc
= prevFp
->m_func
->getEntry() + m_fp
->m_soff
;
6875 inline void OPTBLD_INLINE
VMExecutionContext::iopContCheck(PC
& pc
) {
6877 DECODE_IVA(check_started
);
6878 c_Continuation
* cont
= this_continuation(m_fp
);
6879 if (check_started
) {
6880 cont
->startedCheck();
6885 inline void OPTBLD_INLINE
VMExecutionContext::iopContRaise(PC
& pc
) {
6887 c_Continuation
* cont
= this_continuation(m_fp
);
6888 assert(cont
->m_label
);
6892 inline void OPTBLD_INLINE
VMExecutionContext::iopContValid(PC
& pc
) {
6894 TypedValue
* tv
= m_stack
.allocTV();
6896 tvAsVariant(tv
) = !this_continuation(m_fp
)->done();
6899 inline void OPTBLD_INLINE
VMExecutionContext::iopContKey(PC
& pc
) {
6901 c_Continuation
* cont
= this_continuation(m_fp
);
6902 cont
->startedCheck();
6904 TypedValue
* tv
= m_stack
.allocTV();
6906 tvAsVariant(tv
) = cont
->m_key
;
6909 inline void OPTBLD_INLINE
VMExecutionContext::iopContCurrent(PC
& pc
) {
6911 c_Continuation
* cont
= this_continuation(m_fp
);
6912 cont
->startedCheck();
6914 TypedValue
* tv
= m_stack
.allocTV();
6916 tvAsVariant(tv
) = cont
->m_value
;
6919 inline void OPTBLD_INLINE
VMExecutionContext::iopContStopped(PC
& pc
) {
6921 this_continuation(m_fp
)->setStopped();
6924 inline void OPTBLD_INLINE
VMExecutionContext::iopContHandle(PC
& pc
) {
6926 c_Continuation
* cont
= this_continuation(m_fp
);
6928 cont
->m_value
.setNull();
6930 Variant exn
= tvAsVariant(m_stack
.topTV());
6932 assert(exn
.asObjRef().instanceof(SystemLib::s_ExceptionClass
));
6933 throw exn
.asObjRef();
6936 inline void OPTBLD_INLINE
VMExecutionContext::iopStrlen(PC
& pc
) {
6938 TypedValue
* subj
= m_stack
.topTV();
6939 if (LIKELY(IS_STRING_TYPE(subj
->m_type
))) {
6940 int64_t ans
= subj
->m_data
.pstr
->size();
6941 tvRefcountedDecRef(subj
);
6942 subj
->m_type
= KindOfInt64
;
6943 subj
->m_data
.num
= ans
;
6945 Variant ans
= f_strlen(tvAsVariant(subj
));
6946 tvAsVariant(subj
) = ans
;
6950 inline void OPTBLD_INLINE
VMExecutionContext::iopIncStat(PC
& pc
) {
6952 DECODE_IVA(counter
);
6954 Stats::inc(Stats::StatCounter(counter
), value
);
6957 void VMExecutionContext::classExistsImpl(PC
& pc
, Attr typeAttr
) {
6959 TypedValue
* aloadTV
= m_stack
.topTV();
6960 tvCastToBooleanInPlace(aloadTV
);
6961 assert(aloadTV
->m_type
== KindOfBoolean
);
6962 bool autoload
= aloadTV
->m_data
.num
;
6965 TypedValue
* name
= m_stack
.topTV();
6966 tvCastToStringInPlace(name
);
6967 assert(IS_STRING_TYPE(name
->m_type
));
6969 tvAsVariant(name
) = Unit::classExists(name
->m_data
.pstr
, autoload
, typeAttr
);
6972 inline void OPTBLD_INLINE
VMExecutionContext::iopClassExists(PC
& pc
) {
6973 classExistsImpl(pc
, AttrNone
);
6976 inline void OPTBLD_INLINE
VMExecutionContext::iopInterfaceExists(PC
& pc
) {
6977 classExistsImpl(pc
, AttrInterface
);
6980 inline void OPTBLD_INLINE
VMExecutionContext::iopTraitExists(PC
& pc
) {
6981 classExistsImpl(pc
, AttrTrait
);
6985 VMExecutionContext::prettyStack(const string
& prefix
) const {
6987 string
s("__Halted");
6990 int offset
= (m_fp
->m_func
->unit() != nullptr)
6993 string begPrefix
= prefix
+ "__";
6994 string midPrefix
= prefix
+ "|| ";
6995 string endPrefix
= prefix
+ "\\/";
6996 string stack
= m_stack
.toString(m_fp
, offset
, midPrefix
);
6997 return begPrefix
+ "\n" + stack
+ endPrefix
;
7000 void VMExecutionContext::checkRegStateWork() const {
7001 assert(Transl::tl_regState
== Transl::VMRegState::CLEAN
);
7004 void VMExecutionContext::DumpStack() {
7005 string s
= g_vmContext
->prettyStack("");
7006 fprintf(stderr
, "%s\n", s
.c_str());
7009 void VMExecutionContext::DumpCurUnit(int skip
) {
7010 ActRec
* fp
= g_vmContext
->getFP();
7011 Offset pc
= fp
->m_func
->unit() ? g_vmContext
->pcOff() : 0;
7013 fp
= g_vmContext
->getPrevVMState(fp
, &pc
);
7015 if (fp
== nullptr) {
7016 std::cout
<< "Don't have a valid fp\n";
7020 printf("Offset = %d, in function %s\n", pc
, fp
->m_func
->name()->data());
7021 Unit
* u
= fp
->m_func
->unit();
7023 std::cout
<< "Current unit is NULL\n";
7026 printf("Dumping bytecode for %s(%p)\n", u
->filepath()->data(), u
);
7027 std::cout
<< u
->toString();
7030 void VMExecutionContext::PrintTCCallerInfo() {
7032 ActRec
* fp
= g_vmContext
->getFP();
7033 Unit
* u
= fp
->m_func
->unit();
7034 fprintf(stderr
, "Called from TC address %p\n",
7035 tx()->getTranslatedCaller());
7036 std::cerr
<< u
->filepath()->data() << ':'
7037 << u
->getLineNumber(u
->offsetOf(g_vmContext
->getPC())) << std::endl
;
7041 condStackTraceSep(const char* pfx
) {
7043 "========================================"
7044 "========================================\n",
7048 #define COND_STACKTRACE(pfx) \
7050 string stack = prettyStack(pfx); \
7051 Trace::trace("%s\n", stack.c_str());)
7053 #define O(name, imm, pusph, pop, flags) \
7054 void VMExecutionContext::op##name() { \
7055 condStackTraceSep("op"#name" "); \
7056 COND_STACKTRACE("op"#name" pre: "); \
7058 assert(toOp(*pc) == Op##name); \
7060 int offset = m_fp->m_func->unit()->offsetOf(pc); \
7061 Trace::trace("op"#name" offset: %d\n", offset)); \
7064 COND_STACKTRACE("op"#name" post: "); \
7065 condStackTraceSep("op"#name" "); \
7074 profileReturnValue(const DataType dt
) {
7075 const Func
* f
= curFunc();
7076 if (f
->isPseudoMain() || f
->isClosureBody() || f
->isMagic() ||
7077 Func::isSpecial(f
->name()))
7079 recordType(TypeProfileKey(TypeProfileKey::MethodName
, f
->name()), dt
);
7082 template <int dispatchFlags
>
7083 inline void VMExecutionContext::dispatchImpl(int numInstrs
) {
7084 static const bool limInstrs
= dispatchFlags
& LimitInstrs
;
7085 static const bool breakOnCtlFlow
= dispatchFlags
& BreakOnCtlFlow
;
7086 static const bool profile
= dispatchFlags
& Profile
;
7087 static const void *optabDirect
[] = {
7088 #define O(name, imm, push, pop, flags) \
7093 static const void *optabDbg
[] = {
7094 #define O(name, imm, push, pop, flags) \
7099 static const void *optabCover
[] = {
7100 #define O(name, imm, push, pop, flags) \
7105 assert(sizeof(optabDirect
) / sizeof(const void *) == Op_count
);
7106 assert(sizeof(optabDbg
) / sizeof(const void *) == Op_count
);
7107 const void **optab
= optabDirect
;
7108 bool collectCoverage
= ThreadInfo::s_threadInfo
->
7109 m_reqInjectionData
.getCoverage();
7110 if (collectCoverage
) {
7113 DEBUGGER_ATTACHED_ONLY(optab
= optabDbg
);
7115 * Trace-only mapping of opcodes to names.
7118 static const char *nametab
[] = {
7119 #define O(name, imm, push, pop, flags) \
7124 #endif /* HPHP_TRACE */
7125 bool isCtlFlow
= false;
7127 #define DISPATCH() do { \
7128 if ((breakOnCtlFlow && isCtlFlow) || \
7129 (limInstrs && UNLIKELY(numInstrs-- == 0))) { \
7131 Trace::trace("dispatch: Halt ExecutionContext::dispatch(%p)\n", \
7135 Op op = toOp(*pc); \
7136 COND_STACKTRACE("dispatch: "); \
7138 Trace::trace("dispatch: %d: %s\n", pcOff(), \
7139 nametab[uint8_t(op)])); \
7140 if (profile && (op == OpRetC || op == OpRetV)) { \
7141 profileReturnValue(m_stack.top()->m_type); \
7143 goto *optab[uint8_t(op)]; \
7146 ONTRACE(1, Trace::trace("dispatch: Enter ExecutionContext::dispatch(%p)\n",
7151 #define O(name, imm, pusph, pop, flags) \
7153 phpDebuggerOpcodeHook(pc); \
7155 if (collectCoverage) { \
7156 recordCodeCoverage(pc); \
7161 if (breakOnCtlFlow) { \
7162 isCtlFlow = instrIsControlFlow(Op::name); \
7163 Stats::incOp(Op::name); \
7165 const Op op = Op::name; \
7166 if (op == OpRetC || op == OpRetV || op == OpNativeImpl) { \
7167 if (UNLIKELY(!pc)) { m_fp = 0; return; } \
7176 void VMExecutionContext::dispatch() {
7177 if (shouldProfile()) {
7178 dispatchImpl
<Profile
>(0);
7184 void VMExecutionContext::dispatchN(int numInstrs
) {
7185 dispatchImpl
<LimitInstrs
| BreakOnCtlFlow
>(numInstrs
);
7186 // We are about to go back to Jit, check whether we should
7187 // stick with interpreter
7188 if (DEBUGGER_FORCE_INTR
) {
7189 throw VMSwitchMode();
7193 void VMExecutionContext::dispatchBB() {
7194 dispatchImpl
<BreakOnCtlFlow
>(0);
7195 // We are about to go back to Jit, check whether we should
7196 // stick with interpreter
7197 if (DEBUGGER_FORCE_INTR
) {
7198 throw VMSwitchMode();
7202 void VMExecutionContext::recordCodeCoverage(PC pc
) {
7203 Unit
* unit
= getFP()->m_func
->unit();
7204 assert(unit
!= nullptr);
7205 if (unit
== SystemLib::s_nativeFuncUnit
||
7206 unit
== SystemLib::s_nativeClassUnit
||
7207 unit
== SystemLib::s_hhas_unit
) {
7210 int line
= unit
->getLineNumber(pcOff());
7213 if (unit
!= m_coverPrevUnit
|| line
!= m_coverPrevLine
) {
7214 ThreadInfo
* info
= ThreadInfo::s_threadInfo
.getNoCheck();
7215 m_coverPrevUnit
= unit
;
7216 m_coverPrevLine
= line
;
7217 const StringData
* filepath
= unit
->filepath();
7218 assert(filepath
->isStatic());
7219 info
->m_coverage
->Record(filepath
->data(), line
, line
);
7223 void VMExecutionContext::resetCoverageCounters() {
7224 m_coverPrevLine
= -1;
7225 m_coverPrevUnit
= nullptr;
7228 void VMExecutionContext::pushVMState(VMState
&savedVM
,
7229 const ActRec
* reentryAR
) {
7230 if (debug
&& savedVM
.fp
&&
7231 savedVM
.fp
->m_func
&&
7232 savedVM
.fp
->m_func
->unit()) {
7233 // Some asserts and tracing.
7234 const Func
* func
= savedVM
.fp
->m_func
;
7235 (void) /* bound-check asserts in offsetOf */
7236 func
->unit()->offsetOf(savedVM
.pc
);
7237 TRACE(3, "pushVMState: saving frame %s pc %p off %d fp %p\n",
7238 func
->name()->data(),
7240 func
->unit()->offsetOf(savedVM
.pc
),
7243 m_nestedVMs
.push_back(ReentryRecord(savedVM
, reentryAR
));
7247 void VMExecutionContext::popVMState() {
7248 assert(m_nestedVMs
.size() >= 1);
7250 VMState
&savedVM
= m_nestedVMs
.back().m_savedState
;
7253 m_firstAR
= savedVM
.firstAR
;
7254 assert(m_stack
.top() == savedVM
.sp
);
7258 savedVM
.fp
->m_func
&&
7259 savedVM
.fp
->m_func
->unit()) {
7260 const Func
* func
= savedVM
.fp
->m_func
;
7261 (void) /* bound-check asserts in offsetOf */
7262 func
->unit()->offsetOf(savedVM
.pc
);
7263 TRACE(3, "popVMState: restoring frame %s pc %p off %d fp %p\n",
7264 func
->name()->data(),
7266 func
->unit()->offsetOf(savedVM
.pc
),
7271 m_nestedVMs
.pop_back();
7275 void VMExecutionContext::requestInit() {
7276 assert(SystemLib::s_unit
);
7277 assert(SystemLib::s_nativeFuncUnit
);
7278 assert(SystemLib::s_nativeClassUnit
);
7280 new (&s_requestArenaStorage
) RequestArena();
7281 new (&s_varEnvArenaStorage
) VarEnvArena();
7283 EnvConstants::requestInit(new (request_arena()) EnvConstants());
7284 VarEnv::createGlobal();
7285 m_stack
.requestInit();
7286 Transl::Translator::advanceTranslator();
7287 tx()->requestInit();
7289 if (UNLIKELY(RuntimeOption::EvalJitEnableRenameFunction
)) {
7290 SystemLib::s_unit
->merge();
7291 if (SystemLib::s_hhas_unit
) SystemLib::s_hhas_unit
->merge();
7292 SystemLib::s_nativeFuncUnit
->merge();
7293 SystemLib::s_nativeClassUnit
->merge();
7295 // System units are always merge only, and
7296 // everything is persistent.
7297 assert(SystemLib::s_unit
->isEmpty());
7298 assert(!SystemLib::s_hhas_unit
|| SystemLib::s_hhas_unit
->isEmpty());
7299 assert(SystemLib::s_nativeFuncUnit
->isEmpty());
7300 assert(SystemLib::s_nativeClassUnit
->isEmpty());
7303 profileRequestStart();
7306 Class
* cls
= Unit::GetNamedEntity(s_stdclass
.get())->clsList();
7308 assert(cls
== SystemLib::s_stdclassClass
);
7312 void VMExecutionContext::requestExit() {
7313 treadmillSharedVars();
7316 tx()->requestExit();
7317 Transl::Translator::clearTranslator();
7318 m_stack
.requestExit();
7319 profileRequestEnd();
7320 EventHook::Disable();
7321 EnvConstants::requestExit();
7323 if (m_globalVarEnv
) {
7324 VarEnv::destroy(m_globalVarEnv
);
7328 varenv_arena().~VarEnvArena();
7329 request_arena().~RequestArena();
7332 ///////////////////////////////////////////////////////////////////////////////