2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/bytecode.h"
18 #include "hphp/compiler/builtin_symbols.h"
19 #include "hphp/runtime/vm/event_hook.h"
20 #include "hphp/runtime/vm/jit/translator-x64.h"
21 #include "hphp/runtime/vm/srckey.h"
22 #include "hphp/runtime/vm/member_operations.h"
23 #include "hphp/runtime/base/code_coverage.h"
24 #include "hphp/runtime/base/file_repository.h"
25 #include "hphp/runtime/base/base_includes.h"
26 #include "hphp/runtime/base/execution_context.h"
27 #include "hphp/runtime/base/runtime_option.h"
28 #include "hphp/runtime/base/array/hphp_array.h"
29 #include "hphp/runtime/base/strings.h"
30 #include "hphp/util/util.h"
31 #include "hphp/util/trace.h"
32 #include "hphp/util/debug.h"
33 #include "hphp/runtime/base/stat_cache.h"
34 #include "hphp/runtime/base/shared/shared_variant.h"
36 #include "hphp/runtime/vm/treadmill.h"
37 #include "hphp/runtime/vm/php_debug.h"
38 #include "hphp/runtime/vm/debugger_hook.h"
39 #include "hphp/runtime/vm/runtime.h"
40 #include "hphp/runtime/vm/jit/targetcache.h"
41 #include "hphp/runtime/vm/type_constraint.h"
42 #include "hphp/runtime/vm/jit/translator-inline.h"
43 #include "hphp/runtime/ext/ext_string.h"
44 #include "hphp/runtime/ext/ext_error.h"
45 #include "hphp/runtime/ext/ext_closure.h"
46 #include "hphp/runtime/ext/ext_continuation.h"
47 #include "hphp/runtime/ext/ext_function.h"
48 #include "hphp/runtime/ext/ext_variable.h"
49 #include "hphp/runtime/ext/ext_array.h"
50 #include "hphp/runtime/base/stats.h"
51 #include "hphp/runtime/vm/type_profile.h"
52 #include "hphp/runtime/base/server/source_root_info.h"
53 #include "hphp/runtime/base/util/extended_logger.h"
55 #include "hphp/system/lib/systemlib.h"
56 #include "hphp/runtime/ext/ext_collections.h"
58 #include "hphp/runtime/vm/name_value_table_wrapper.h"
59 #include "hphp/runtime/vm/request_arena.h"
60 #include "hphp/util/arena.h"
65 #include <boost/format.hpp>
66 #include <boost/utility/typed_in_place_factory.hpp>
// TODO: #1746957, #1756122
// we should skip the call in call_user_func_array, if
// by reference params are passed by value, or if its
// argument is not an array, but currently lots of tests
// depend on actually making the call.
const bool skipCufOnInvalidParams = false;
82 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
83 // to be closer to other bytecode.cpp data.
84 bool RuntimeOption::RepoAuthoritative
= false;
// Inlining hint for the opcode-dispatch helpers below.
// NOTE(review): in upstream HHVM this definition sits in the non-debug arm of
// a conditional; only this arm is visible here — confirm against the full file.
#define OPTBLD_INLINE ALWAYS_INLINE
95 static const Trace::Module TRACEMOD
= Trace::bcinterp
;
99 struct VMPrepareUnwind
: std::exception
{
100 const char* what() const throw() { return "VMPrepareUnwind"; }
105 ActRec
* ActRec::arGetSfp() const {
106 ActRec
* prevFrame
= (ActRec
*)m_savedRbp
;
107 if (LIKELY(((uintptr_t)prevFrame
- Util::s_stackLimit
) >=
108 Util::s_stackSize
)) {
109 if (LIKELY(prevFrame
!= nullptr)) return prevFrame
;
112 return const_cast<ActRec
*>(this);
116 ActRec::skipFrame() const {
117 return m_func
&& m_func
->skipFrame();
121 Class
* arGetContextClassImpl
<false>(const ActRec
* ar
) {
125 return ar
->m_func
->cls();
129 Class
* arGetContextClassImpl
<true>(const ActRec
* ar
) {
133 if (ar
->m_func
->isPseudoMain() || ar
->m_func
->isBuiltin()) {
134 // Pseudomains inherit the context of their caller
135 VMExecutionContext
* context
= g_vmContext
;
136 ar
= context
->getPrevVMState(ar
);
137 while (ar
!= nullptr &&
138 (ar
->m_func
->isPseudoMain() || ar
->m_func
->isBuiltin())) {
139 ar
= context
->getPrevVMState(ar
);
145 return ar
->m_func
->cls();
148 const StaticString
s_call_user_func("call_user_func");
149 const StaticString
s_call_user_func_array("call_user_func_array");
150 const StaticString
s_hphpd_break("hphpd_break");
151 const StaticString
s_fb_enable_code_coverage("fb_enable_code_coverage");
152 const StaticString
s_stdclass("stdclass");
153 const StaticString
s___call("__call");
154 const StaticString
s___callStatic("__callStatic");
155 const StaticString
s_file("file");
156 const StaticString
s_line("line");
157 const StaticString
s_function("function");
158 const StaticString
s_args("args");
159 const StaticString
s_class("class");
160 const StaticString
s_object("object");
161 const StaticString
s_type("type");
162 const StaticString
s_include("include");
164 ///////////////////////////////////////////////////////////////////////////////
166 //=============================================================================
167 // Miscellaneous macros.
170 #define DECODE_JMP(type, var) \
171 type var __attribute__((unused)) = *(type*)pc; \
173 Trace::trace("decode: Immediate %s %" PRIi64"\n", #type, \
175 #define ITER_SKIP(offset) pc = origPc + (offset);
177 #define DECODE(type, var) \
178 DECODE_JMP(type, var); \
180 #define DECODE_IVA(var) \
181 int32_t var UNUSED = decodeVariableSizeImm(&pc); \
183 Trace::trace("decode: Immediate int32 %" PRIi64"\n", \
185 #define DECODE_LITSTR(var) \
189 var = m_fp->m_func->unit()->lookupLitstrId(id); \
192 #define DECODE_HA(var) DECODE_IVA(var)
193 #define DECODE_IA(var) DECODE_IVA(var)
195 #define SYNC() m_pc = pc
197 //=============================================================================
198 // Miscellaneous helpers.
200 static inline Class
* frameStaticClass(ActRec
* fp
) {
202 return fp
->getThis()->getVMClass();
203 } else if (fp
->hasClass()) {
204 return fp
->getClass();
210 //=============================================================================
218 , m_nvTable(boost::in_place
<NameValueTable
>(
219 RuntimeOption::EvalVMInitialGlobalTableSize
))
221 TypedValue globalArray
;
222 globalArray
.m_type
= KindOfArray
;
223 globalArray
.m_data
.parr
=
224 new (request_arena()) GlobalNameValueTableWrapper(&*m_nvTable
);
225 globalArray
.m_data
.parr
->incRefCount();
226 m_nvTable
->set(StringData::GetStaticString("GLOBALS"), &globalArray
);
227 tvRefcountedDecRef(&globalArray
);
230 VarEnv::VarEnv(ActRec
* fp
, ExtraArgs
* eArgs
)
236 const Func
* func
= fp
->m_func
;
237 const Id numNames
= func
->numNamedLocals();
239 if (!numNames
) return;
241 m_nvTable
= boost::in_place
<NameValueTable
>(numNames
);
243 TypedValue
** origLocs
=
244 reinterpret_cast<TypedValue
**>(uintptr_t(this) + sizeof(VarEnv
));
245 TypedValue
* loc
= frame_local(fp
, 0);
246 for (Id i
= 0; i
< numNames
; ++i
, --loc
) {
247 assert(func
->lookupVarId(func
->localVarName(i
)) == (int)i
);
248 origLocs
[i
] = m_nvTable
->migrateSet(func
->localVarName(i
), loc
);
253 TRACE(3, "Destroying VarEnv %p [%s]\n",
255 isGlobalScope() ? "global scope" : "local scope");
256 assert(m_restoreLocations
.empty());
257 if (g_vmContext
->m_topVarEnv
== this) {
258 g_vmContext
->m_topVarEnv
= m_previous
;
261 if (!isGlobalScope()) {
262 if (LIKELY(!m_malloced
)) {
263 varenv_arena().endFrame();
268 * When detaching the global scope, we leak any live objects (and
269 * let the smart allocator clean them up). This is because we're
270 * not supposed to run destructors for objects that are live at
271 * the end of a request.
277 VarEnv
* VarEnv::createLazyAttach(ActRec
* fp
,
278 bool skipInsert
/* = false */) {
279 const Func
* func
= fp
->m_func
;
280 const size_t numNames
= func
->numNamedLocals();
281 ExtraArgs
* eArgs
= fp
->getExtraArgs();
282 const size_t neededSz
= sizeof(VarEnv
) +
283 sizeof(TypedValue
*) * numNames
;
285 TRACE(3, "Creating lazily attached VarEnv\n");
287 if (LIKELY(!skipInsert
)) {
288 auto& va
= varenv_arena();
290 void* mem
= va
.alloc(neededSz
);
291 VarEnv
* ret
= new (mem
) VarEnv(fp
, eArgs
);
292 TRACE(3, "Creating lazily attached VarEnv %p\n", mem
);
293 ret
->setPrevious(g_vmContext
->m_topVarEnv
);
294 g_vmContext
->m_topVarEnv
= ret
;
299 * For skipInsert == true, we're adding a VarEnv in the middle of
300 * the chain, which means we can't use the stack allocation.
302 * The caller must immediately setPrevious, so don't bother setting
303 * it to an invalid pointer except in a debug build.
305 void* mem
= malloc(neededSz
);
306 VarEnv
* ret
= new (mem
) VarEnv(fp
, eArgs
);
307 ret
->m_malloced
= true;
309 ret
->setPrevious((VarEnv
*)-1);
314 VarEnv
* VarEnv::createGlobal() {
315 assert(!g_vmContext
->m_globalVarEnv
);
316 assert(!g_vmContext
->m_topVarEnv
);
318 VarEnv
* ret
= new (request_arena()) VarEnv();
319 TRACE(3, "Creating VarEnv %p [global scope]\n", ret
);
320 g_vmContext
->m_globalVarEnv
= g_vmContext
->m_topVarEnv
= ret
;
324 void VarEnv::destroy(VarEnv
* ve
) {
325 bool malloced
= ve
->m_malloced
;
327 if (UNLIKELY(malloced
)) free(ve
);
330 void VarEnv::attach(ActRec
* fp
) {
331 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
333 isGlobalScope() ? "global scope" : "local scope",
334 int(fp
->m_func
->numNamedLocals()), fp
);
335 assert(m_depth
== 0 || fp
->arGetSfp() == m_cfp
||
336 (fp
->arGetSfp() == fp
&& g_vmContext
->isNested()));
340 // Overlay fp's locals, if it has any.
342 const Func
* func
= fp
->m_func
;
343 const Id numNames
= func
->numNamedLocals();
348 m_nvTable
= boost::in_place
<NameValueTable
>(numNames
);
351 TypedValue
** origLocs
= new (varenv_arena()) TypedValue
*[
352 func
->numNamedLocals()];
353 TypedValue
* loc
= frame_local(fp
, 0);
354 for (Id i
= 0; i
< numNames
; ++i
, --loc
) {
355 assert(func
->lookupVarId(func
->localVarName(i
)) == (int)i
);
356 origLocs
[i
] = m_nvTable
->migrate(func
->localVarName(i
), loc
);
358 m_restoreLocations
.push_back(origLocs
);
361 void VarEnv::detach(ActRec
* fp
) {
362 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
364 isGlobalScope() ? "global scope" : "local scope",
369 // Merge/remove fp's overlaid locals, if it had any.
370 const Func
* func
= fp
->m_func
;
371 if (Id
const numLocals
= func
->numNamedLocals()) {
373 * In the case of a lazily attached VarEnv, we have our locations
374 * for the first (lazy) attach stored immediately following the
375 * VarEnv in memory. In this case m_restoreLocations will be empty.
377 assert((!isGlobalScope() && m_depth
== 1) == m_restoreLocations
.empty());
378 TypedValue
** origLocs
=
379 !m_restoreLocations
.empty()
380 ? m_restoreLocations
.back()
381 : reinterpret_cast<TypedValue
**>(uintptr_t(this) + sizeof(VarEnv
));
383 for (Id i
= 0; i
< numLocals
; i
++) {
384 m_nvTable
->resettle(func
->localVarName(i
), origLocs
[i
]);
386 if (!m_restoreLocations
.empty()) {
387 m_restoreLocations
.pop_back();
391 VMExecutionContext
* context
= g_vmContext
;
392 m_cfp
= context
->getPrevVMState(fp
);
396 // don't free global varEnv
397 if (context
->m_globalVarEnv
!= this) {
398 assert(!isGlobalScope());
404 // This helper is creating a NVT because of dynamic variable accesses,
405 // even though we're already attached to a frame and it had no named
407 void VarEnv::ensureNvt() {
408 const size_t kLazyNvtSize
= 3;
410 m_nvTable
= boost::in_place
<NameValueTable
>(kLazyNvtSize
);
414 void VarEnv::set(const StringData
* name
, TypedValue
* tv
) {
416 m_nvTable
->set(name
, tv
);
419 void VarEnv::bind(const StringData
* name
, TypedValue
* tv
) {
421 m_nvTable
->bind(name
, tv
);
424 void VarEnv::setWithRef(const StringData
* name
, TypedValue
* tv
) {
425 if (tv
->m_type
== KindOfRef
) {
432 TypedValue
* VarEnv::lookup(const StringData
* name
) {
436 return m_nvTable
->lookup(name
);
439 TypedValue
* VarEnv::lookupAdd(const StringData
* name
) {
441 return m_nvTable
->lookupAdd(name
);
444 TypedValue
* VarEnv::lookupRawPointer(const StringData
* name
) {
446 return m_nvTable
->lookupRawPointer(name
);
449 TypedValue
* VarEnv::lookupAddRawPointer(const StringData
* name
) {
451 return m_nvTable
->lookupAddRawPointer(name
);
454 bool VarEnv::unset(const StringData
* name
) {
455 if (!m_nvTable
) return true;
456 m_nvTable
->unset(name
);
460 Array
VarEnv::getDefinedVariables() const {
461 Array ret
= Array::Create();
463 if (!m_nvTable
) return ret
;
465 NameValueTable::Iterator
iter(&*m_nvTable
);
466 for (; iter
.valid(); iter
.next()) {
467 const StringData
* sd
= iter
.curKey();
468 const TypedValue
* tv
= iter
.curVal();
469 if (tvAsCVarRef(tv
).isReferenced()) {
470 ret
.setRef(StrNR(sd
).asString(), tvAsCVarRef(tv
));
472 ret
.add(StrNR(sd
).asString(), tvAsCVarRef(tv
));
479 TypedValue
* VarEnv::getExtraArg(unsigned argInd
) const {
480 return m_extraArgs
->getExtraArg(argInd
);
483 //=============================================================================
485 ExtraArgs::ExtraArgs() {}
486 ExtraArgs::~ExtraArgs() {}
488 void* ExtraArgs::allocMem(unsigned nargs
) {
489 return smart_malloc(sizeof(TypedValue
) * nargs
+ sizeof(ExtraArgs
));
492 ExtraArgs
* ExtraArgs::allocateCopy(TypedValue
* args
, unsigned nargs
) {
493 void* mem
= allocMem(nargs
);
494 ExtraArgs
* ea
= new (mem
) ExtraArgs();
497 * The stack grows downward, so the args in memory are "backward"; i.e. the
498 * leftmost (in PHP) extra arg is highest in memory.
500 std::reverse_copy(args
, args
+ nargs
, &ea
->m_extraArgs
[0]);
504 ExtraArgs
* ExtraArgs::allocateUninit(unsigned nargs
) {
505 void* mem
= ExtraArgs::allocMem(nargs
);
506 return new (mem
) ExtraArgs();
509 void ExtraArgs::deallocate(ExtraArgs
* ea
, unsigned nargs
) {
512 for (unsigned i
= 0; i
< nargs
; ++i
) {
513 tvRefcountedDecRef(ea
->m_extraArgs
+ i
);
519 void ExtraArgs::deallocate(ActRec
* ar
) {
520 const int numExtra
= ar
->numArgs() - ar
->m_func
->numParams();
521 deallocate(ar
->getExtraArgs(), numExtra
);
524 TypedValue
* ExtraArgs::getExtraArg(unsigned argInd
) const {
525 return const_cast<TypedValue
*>(&m_extraArgs
[argInd
]);
528 //=============================================================================
531 // Store actual stack elements array in a thread-local in order to amortize the
532 // cost of allocation.
535 StackElms() : m_elms(nullptr) {}
540 if (m_elms
== nullptr) {
541 // RuntimeOption::EvalVMStackElms-sized and -aligned.
542 size_t algnSz
= RuntimeOption::EvalVMStackElms
* sizeof(TypedValue
);
543 if (posix_memalign((void**)&m_elms
, algnSz
, algnSz
) != 0) {
544 throw std::runtime_error(
545 std::string("VM stack initialization failed: ") + strerror(errno
));
551 if (m_elms
!= nullptr) {
559 IMPLEMENT_THREAD_LOCAL(StackElms
, t_se
);
561 const int Stack::sSurprisePageSize
= sysconf(_SC_PAGESIZE
);
562 // We reserve the bottom page of each stack for use as the surprise
563 // page, so the minimum useful stack size is the next power of two.
564 const uint
Stack::sMinStackElms
= 2 * sSurprisePageSize
/ sizeof(TypedValue
);
566 void Stack::ValidateStackSize() {
567 if (RuntimeOption::EvalVMStackElms
< sMinStackElms
) {
568 throw std::runtime_error(str(
569 boost::format("VM stack size of 0x%llx is below the minimum of 0x%x")
570 % RuntimeOption::EvalVMStackElms
573 if (!Util::isPowerOfTwo(RuntimeOption::EvalVMStackElms
)) {
574 throw std::runtime_error(str(
575 boost::format("VM stack size of 0x%llx is not a power of 2")
576 % RuntimeOption::EvalVMStackElms
));
581 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
591 mprotect(m_elms
, sizeof(void*), PROT_NONE
);
598 mprotect(m_elms
, sizeof(void*), PROT_READ
| PROT_WRITE
);
603 Stack::requestInit() {
604 m_elms
= t_se
->elms();
606 RequestInjectionData
& data
= ThreadInfo::s_threadInfo
->m_reqInjectionData
;
607 Lock
l(data
.surpriseLock
);
608 assert(data
.surprisePage
== nullptr);
609 data
.surprisePage
= m_elms
;
611 // Burn one element of the stack, to satisfy the constraint that
612 // valid m_top values always have the same high-order (>
613 // log(RuntimeOption::EvalVMStackElms)) bits.
614 m_top
= m_base
= m_elms
+ RuntimeOption::EvalVMStackElms
- 1;
616 // Because of the surprise page at the bottom of the stack we lose an
617 // additional 256 elements which must be taken into account when checking for
619 UNUSED
size_t maxelms
=
620 RuntimeOption::EvalVMStackElms
- sSurprisePageSize
/ sizeof(TypedValue
);
621 assert(!wouldOverflow(maxelms
- 1));
622 assert(wouldOverflow(maxelms
));
624 // Reset permissions on our stack's surprise page
629 Stack::requestExit() {
630 if (m_elms
!= nullptr) {
632 RequestInjectionData
& data
= ThreadInfo::s_threadInfo
->m_reqInjectionData
;
633 Lock
l(data
.surpriseLock
);
634 assert(data
.surprisePage
== m_elms
);
636 data
.surprisePage
= nullptr;
642 void flush_evaluation_stack() {
643 if (g_context
.isNull()) {
644 // For RPCRequestHandler threads, the ExecutionContext can stay alive
645 // across requests, and hold references to the VM stack, and
646 // the TargetCache needs to keep track of which classes are live etc
647 // So only flush the VM stack and the target cache if the execution
650 if (!t_se
.isNull()) {
653 TargetCache::flush();
657 void Stack::toStringElm(std::ostream
& os
, TypedValue
* tv
, const ActRec
* fp
)
659 if (tv
->m_type
< MinDataType
|| tv
->m_type
> MaxNumDataTypes
) {
660 os
<< " ??? type " << tv
->m_type
<< "\n";
663 assert(tv
->m_type
>= MinDataType
&& tv
->m_type
< MaxNumDataTypes
);
664 if (IS_REFCOUNTED_TYPE(tv
->m_type
) && tv
->m_data
.pref
->_count
<= 0) {
665 // OK in the invoking frame when running a destructor.
666 os
<< " ??? inner_count " << tv
->m_data
.pref
->_count
<< " ";
669 switch (tv
->m_type
) {
672 os
<< "@" << tv
->m_data
.pref
;
673 tv
= tv
->m_data
.pref
->tv(); // Unbox so contents get printed below
674 assert(tv
->m_type
!= KindOfRef
);
675 toStringElm(os
, tv
, fp
);
685 switch (tv
->m_type
) {
694 case KindOfBoolean
: {
695 os
<< (tv
->m_data
.num
? "True" : "False");
699 os
<< "0x" << std::hex
<< tv
->m_data
.num
<< std::dec
;
703 os
<< tv
->m_data
.dbl
;
706 case KindOfStaticString
:
708 int len
= tv
->m_data
.pstr
->size();
709 bool truncated
= false;
714 os
<< tv
->m_data
.pstr
715 << "c(" << tv
->m_data
.pstr
->getCount() << ")"
717 << Util::escapeStringForCPP(tv
->m_data
.pstr
->data(), len
)
718 << "\"" << (truncated
? "..." : "");
722 assert(tv
->m_data
.parr
->getCount() > 0);
723 os
<< tv
->m_data
.parr
724 << "c(" << tv
->m_data
.parr
->getCount() << ")"
729 assert(tv
->m_data
.pobj
->getCount() > 0);
730 os
<< tv
->m_data
.pobj
731 << "c(" << tv
->m_data
.pobj
->getCount() << ")"
733 << tvAsVariant(tv
).asObjRef().get()->o_getClassName().get()->data()
741 os
<< tv
->m_data
.pcls
742 << ":" << tv
->m_data
.pcls
->name()->data();
752 void Stack::toStringIter(std::ostream
& os
, Iter
* it
, bool itRef
) const {
754 os
<< "I:MutableArray";
757 switch (it
->arr().getIterType()) {
758 case ArrayIter::TypeUndefined
: {
762 case ArrayIter::TypeArray
: {
766 case ArrayIter::TypeIterator
: {
778 void Stack::toStringFrag(std::ostream
& os
, const ActRec
* fp
,
779 const TypedValue
* top
) const {
782 // The only way to figure out which stack elements are activation records is
783 // to follow the frame chain. However, the goal for each stack frame is to
784 // print stack fragments from deepest to shallowest -- a then b in the
785 // following example:
787 // {func:foo,soff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
788 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
790 // Use depth-first recursion to get the output order correct.
792 if (LIKELY(!fp
->m_func
->isGenerator())) {
793 tv
= frameStackBase(fp
);
795 tv
= generatorStackBase(fp
);
798 for (tv
--; (uintptr_t)tv
>= (uintptr_t)top
; tv
--) {
800 toStringElm(os
, tv
, fp
);
804 void Stack::toStringAR(std::ostream
& os
, const ActRec
* fp
,
805 const FPIEnt
*fe
, const TypedValue
* top
) const {
807 if (LIKELY(!fp
->m_func
->isGenerator())) {
808 ar
= arAtOffset(fp
, -fe
->m_fpOff
);
810 // Deal with generators' split stacks. See unwindAR for reasoning.
811 TypedValue
* genStackBase
= generatorStackBase(fp
);
813 (ActRec
*)(genStackBase
+ fp
->m_func
->numSlotsInFrame());
814 ar
= arAtOffset(fakePrevFP
, -fe
->m_fpOff
);
817 if (fe
->m_parentIndex
!= -1) {
818 toStringAR(os
, fp
, &fp
->m_func
->fpitab()[fe
->m_parentIndex
],
819 (TypedValue
*)&ar
[1]);
821 toStringFrag(os
, fp
, (TypedValue
*)&ar
[1]);
824 os
<< " {func:" << ar
->m_func
->fullName()->data() << "}";
825 TypedValue
* tv
= (TypedValue
*)ar
;
826 for (tv
--; (uintptr_t)tv
>= (uintptr_t)top
; tv
--) {
828 toStringElm(os
, tv
, fp
);
832 void Stack::toStringFragAR(std::ostream
& os
, const ActRec
* fp
,
833 int offset
, const TypedValue
* top
) const {
834 const FPIEnt
*fe
= fp
->m_func
->findFPI(offset
);
836 toStringAR(os
, fp
, fe
, top
);
838 toStringFrag(os
, fp
, top
);
842 void Stack::toStringFrame(std::ostream
& os
, const ActRec
* fp
,
843 int offset
, const TypedValue
* ftop
,
844 const string
& prefix
) const {
847 // Use depth-first recursion to output the most deeply nested stack frame
851 TypedValue
* prevStackTop
= nullptr;
852 ActRec
* prevFp
= g_vmContext
->getPrevVMState(fp
, &prevPc
, &prevStackTop
);
853 if (prevFp
!= nullptr) {
854 toStringFrame(os
, prevFp
, prevPc
, prevStackTop
, prefix
);
859 const Func
* func
= fp
->m_func
;
862 string
funcName(func
->fullName()->data());
863 os
<< "{func:" << funcName
864 << ",soff:" << fp
->m_soff
865 << ",this:0x" << std::hex
<< (fp
->hasThis() ? fp
->getThis() : nullptr)
867 TypedValue
* tv
= (TypedValue
*)fp
;
870 if (func
->numLocals() > 0) {
872 int n
= func
->numLocals();
873 for (int i
= 0; i
< n
; i
++, tv
--) {
877 toStringElm(os
, tv
, fp
);
882 assert(!func
->info() || func
->numIterators() == 0);
883 if (func
->numIterators() > 0) {
885 Iter
* it
= &((Iter
*)&tv
[1])[-1];
886 for (int i
= 0; i
< func
->numIterators(); i
++, it
--) {
891 if (func
->checkIterScope(offset
, i
, itRef
)) {
892 toStringIter(os
, it
, itRef
);
900 toStringFragAR(os
, fp
, offset
, ftop
);
905 string
Stack::toString(const ActRec
* fp
, int offset
,
906 const string prefix
/* = "" */) const {
907 std::ostringstream os
;
908 os
<< prefix
<< "=== Stack at " << curUnit()->filepath()->data() << ":" <<
909 curUnit()->getLineNumber(curUnit()->offsetOf(vmpc())) << " func " <<
910 curFunc()->fullName()->data() << " ===\n";
912 toStringFrame(os
, fp
, offset
, m_top
, prefix
);
917 UnwindStatus
Stack::unwindFrag(ActRec
* fp
, int offset
,
918 PC
& pc
, Fault
& fault
) {
919 const Func
* func
= fp
->m_func
;
920 FTRACE(1, "unwindFrag: func {} ({})\n",
921 func
->fullName()->data(), func
->unit()->filepath()->data());
923 const bool unwindingGeneratorFrame
= func
->isGenerator();
924 auto const curOp
= *reinterpret_cast<const Opcode
*>(pc
);
925 using namespace HPHP
;
926 const bool unwindingReturningFrame
= curOp
== OpRetC
|| curOp
== OpRetV
;
928 if (UNLIKELY(unwindingGeneratorFrame
)) {
929 assert(!isValidAddress((uintptr_t)fp
));
930 evalTop
= generatorStackBase(fp
);
932 assert(isValidAddress((uintptr_t)fp
));
933 evalTop
= frameStackBase(fp
);
935 assert(isValidAddress((uintptr_t)evalTop
));
936 assert(evalTop
>= m_top
);
938 while (m_top
< evalTop
) {
943 * This code is repeatedly called with the same offset when an
944 * exception is raised and rethrown by fault handlers. This
945 * `faultNest' iterator is here to skip the EHEnt handlers that have
946 * already been run for this in-flight exception.
948 if (const EHEnt
* eh
= func
->findEH(offset
)) {
951 assert(faultNest
<= fault
.m_handledCount
);
952 if (faultNest
== fault
.m_handledCount
) {
953 ++fault
.m_handledCount
;
955 switch (eh
->m_ehtype
) {
956 case EHEnt::EHType_Fault
:
957 FTRACE(1, "unwindFrag: entering fault at {}: save {}\n",
959 func
->unit()->offsetOf(pc
));
960 fault
.m_savedRaiseOffset
= func
->unit()->offsetOf(pc
);
961 pc
= (uchar
*)(func
->unit()->entry() + eh
->m_fault
);
962 return UnwindResumeVM
;
963 case EHEnt::EHType_Catch
:
964 // Note: we skip catch clauses if we have a pending C++ exception
965 // as part of our efforts to avoid running more PHP code in the
966 // face of such exceptions.
967 if ((fault
.m_faultType
== Fault::UserException
) &&
968 (ThreadInfo::s_threadInfo
->m_pendingException
== nullptr)) {
969 ObjectData
* obj
= fault
.m_userException
;
970 for (auto& idOff
: eh
->m_catches
) {
971 auto handler
= func
->unit()->at(idOff
.second
);
972 FTRACE(1, "unwindFrag: catch candidate {}\n", handler
);
973 Class
* cls
= Unit::lookupClass(
974 func
->unit()->lookupNamedEntityId(idOff
.first
)
976 if (cls
&& obj
->instanceof(cls
)) {
978 FTRACE(1, "unwindFrag: entering catch at {}\n", pc
);
979 return UnwindResumeVM
;
987 if (eh
->m_parentIndex
!= -1) {
988 eh
= &func
->ehtab()[eh
->m_parentIndex
];
996 // We found no more handlers in this frame, so the nested fault
997 // count starts over for the caller frame.
998 fault
.m_handledCount
= 0;
1000 if (fp
->isFromFPushCtor() && fp
->hasThis()) {
1001 fp
->getThis()->setNoDestruct();
1004 // A generator's locals don't live on this stack.
1005 if (LIKELY(!unwindingGeneratorFrame
)) {
1007 * If we're unwinding through a frame that's returning, it's only
1008 * possible that its locals have already been decref'd.
1012 * - If a destructor for any of these things throws a php
1013 * exception, it's swallowed at the dtor boundary and we keep
1016 * - If the destructor for any of these things throws a fatal,
1017 * it's swallowed, and we set surprise flags to throw a fatal
1020 * - If the second case happened and we have to run another
1021 * destructor, its enter hook will throw, but it will be
1024 * - Finally, the exit hook for the returning function can
1025 * throw, but this happens last so everything is destructed.
1028 if (!unwindingReturningFrame
) {
1030 // Note that we must convert locals and the $this to
1031 // uninit/zero during unwind. This is because a backtrace
1032 // from another destructing object during this unwind may try
1034 frame_free_locals_unwind(fp
, func
->numLocals());
1037 ndiscard(func
->numSlotsInFrame());
1039 FTRACE(1, "unwindFrag: propagate\n");
1040 return UnwindPropagate
;
1043 void Stack::unwindARFrag(ActRec
* ar
) {
1044 while (m_top
< (TypedValue
*)ar
) {
1049 void Stack::unwindAR(ActRec
* fp
, const FPIEnt
* fe
) {
1051 TRACE(1, "unwindAR: function %s, pIdx %d\n",
1052 fp
->m_func
->name()->data(), fe
->m_parentIndex
);
1054 if (LIKELY(!fp
->m_func
->isGenerator())) {
1055 ar
= arAtOffset(fp
, -fe
->m_fpOff
);
1057 // fp is pointing into the continuation object. Since fpOff is given as an
1058 // offset from the frame pointer as if it were in the normal place on the
1059 // main stack, we have to reconstruct that "normal place".
1060 TypedValue
* genStackBase
= generatorStackBase(fp
);
1061 ActRec
* fakePrevFP
=
1062 (ActRec
*)(genStackBase
+ fp
->m_func
->numSlotsInFrame());
1063 ar
= arAtOffset(fakePrevFP
, -fe
->m_fpOff
);
1065 assert((TypedValue
*)ar
>= m_top
);
1068 if (ar
->isFromFPushCtor()) {
1069 assert(ar
->hasThis());
1070 ar
->getThis()->setNoDestruct();
1074 if (fe
->m_parentIndex
!= -1) {
1075 fe
= &fp
->m_func
->fpitab()[fe
->m_parentIndex
];
1082 UnwindStatus
Stack::unwindFrame(ActRec
*& fp
, int offset
, PC
& pc
, Fault fault
) {
1083 VMExecutionContext
* context
= g_vmContext
;
1086 SrcKey
sk(fp
->m_func
, offset
);
1087 SKTRACE(1, sk
, "unwindFrame: func %s, offset %d fp %p\n",
1088 fp
->m_func
->name()->data(),
1091 // If the exception is already propagating, if it was in any FPI
1092 // region we already handled unwinding it the first time around.
1093 if (fault
.m_handledCount
== 0) {
1094 if (const FPIEnt
*fe
= fp
->m_func
->findFPI(offset
)) {
1099 if (unwindFrag(fp
, offset
, pc
, fault
) == UnwindResumeVM
) {
1100 // We've kept our own copy of the Fault, because m_faults may
1101 // change if we have a reentry during unwinding. When we're
1102 // ready to resume, we need to replace the current fault to
1103 // reflect any state changes we've made (handledCount, etc).
1104 assert(!context
->m_faults
.empty());
1105 context
->m_faults
.back() = fault
;
1106 return UnwindResumeVM
;
1109 ActRec
*prevFp
= fp
->arGetSfp();
1110 SKTRACE(1, sk
, "unwindFrame: fp %p prevFp %p\n",
1112 if (LIKELY(!fp
->m_func
->isGenerator())) {
1113 // We don't need to refcount the AR's refcounted members; that was
1114 // taken care of in frame_free_locals, called from unwindFrag().
1115 // If it's a generator, the AR doesn't live on this stack.
1120 TRACE(1, "unwindFrame: reached the end of this nesting's ActRec "
1124 // Keep the pc up to date while unwinding.
1125 Offset prevOff
= fp
->m_soff
+ prevFp
->m_func
->base();
1126 const Func
*prevF
= prevFp
->m_func
;
1127 assert(isValidAddress((uintptr_t)prevFp
) || prevF
->isGenerator());
1128 pc
= prevF
->unit()->at(prevOff
);
1133 return UnwindPropagate
;
1136 bool Stack::wouldOverflow(int numCells
) const {
1137 // The funny approach here is to validate the translator's assembly
1138 // technique. We've aligned and sized the stack so that the high order
1139 // bits of valid cells are all the same. In the translator, numCells
1140 // can be hardcoded, and m_top is wired into a register,
1141 // so the expression requires no loads.
1142 intptr_t truncatedTop
= intptr_t(m_top
) / sizeof(TypedValue
);
1143 truncatedTop
&= RuntimeOption::EvalVMStackElms
- 1;
1144 intptr_t diff
= truncatedTop
- numCells
-
1145 sSurprisePageSize
/ sizeof(TypedValue
);
1149 TypedValue
* Stack::frameStackBase(const ActRec
* fp
) {
1150 const Func
* func
= fp
->m_func
;
1151 assert(!func
->isGenerator());
1152 return (TypedValue
*)((uintptr_t)fp
1153 - (uintptr_t)(func
->numLocals()) * sizeof(TypedValue
)
1154 - (uintptr_t)(func
->numIterators() * sizeof(Iter
)));
1157 TypedValue
* Stack::generatorStackBase(const ActRec
* fp
) {
1158 assert(fp
->m_func
->isGenerator());
1159 VMExecutionContext
* context
= g_vmContext
;
1160 ActRec
* sfp
= fp
->arGetSfp();
1162 // In the reentrant case, we can consult the savedVM state. We simply
1163 // use the top of stack of the previous VM frame (since the ActRec,
1164 // locals, and iters for this frame do not reside on the VM stack).
1165 return context
->m_nestedVMs
.back().m_savedState
.sp
;
1167 // In the non-reentrant case, we know generators are always called from a
1168 // function with an empty stack. So we find the caller's FP, compensate
1169 // for its locals, and then we've found the base of the generator's stack.
1170 return (TypedValue
*)sfp
- sfp
->m_func
->numSlotsInFrame();
1174 __thread RequestArenaStorage s_requestArenaStorage
;
1175 __thread VarEnvArenaStorage s_varEnvArenaStorage
;
1178 //=============================================================================
1179 // ExecutionContext.
1181 using namespace HPHP
;
1182 using namespace HPHP::MethodLookup
;
1184 ActRec
* VMExecutionContext::getOuterVMFrame(const ActRec
* ar
) {
1185 ActRec
* prevFrame
= (ActRec
*)ar
->m_savedRbp
;
1186 if (LIKELY(((uintptr_t)prevFrame
- Util::s_stackLimit
) >=
1187 Util::s_stackSize
)) {
1188 if (LIKELY(prevFrame
!= nullptr)) return prevFrame
;
1191 if (LIKELY(!m_nestedVMs
.empty())) return m_nestedVMs
.back().m_savedState
.fp
;
1195 TypedValue
* VMExecutionContext::lookupClsCns(const NamedEntity
* ne
,
1196 const StringData
* cls
,
1197 const StringData
* cns
) {
1198 Class
* class_
= Unit::loadClass(ne
, cls
);
1199 if (class_
== nullptr) {
1200 raise_error(Strings::UNKNOWN_CLASS
, cls
->data());
1202 TypedValue
* clsCns
= class_
->clsCnsGet(cns
);
1203 if (clsCns
== nullptr) {
1204 raise_error("Couldn't find constant %s::%s",
1205 cls
->data(), cns
->data());
1210 TypedValue
* VMExecutionContext::lookupClsCns(const StringData
* cls
,
1211 const StringData
* cns
) {
1212 return lookupClsCns(Unit::GetNamedEntity(cls
), cls
, cns
);
1215 // Look up the method specified by methodName from the class specified by cls
1216 // and enforce accessibility. Accessibility checks depend on the relationship
1217 // between the class that first declared the method (baseClass) and the context
1220 // If there are multiple accessible methods with the specified name declared in
1221 // cls and ancestors of cls, the method from the most derived class will be
1222 // returned, except if we are doing an ObjMethod call ("$obj->foo()") and there
1223 // is an accessible private method, in which case the accessible private method
1224 // will be returned.
1226 // Accessibility rules:
1228 // | baseClass/ctx relationship | public | protected | private |
1229 // +----------------------------+--------+-----------+---------+
1230 // | anon/unrelated | yes | no | no |
1231 // | baseClass == ctx | yes | yes | yes |
1232 // | baseClass derived from ctx | yes | yes | no |
1233 // | ctx derived from baseClass | yes | yes | no |
1234 // +----------------------------+--------+-----------+---------+
1236 const Func
* VMExecutionContext::lookupMethodCtx(const Class
* cls
,
1237 const StringData
* methodName
,
1240 bool raise
/* = false */) {
1242 if (callType
== CtorMethod
) {
1243 assert(methodName
== nullptr);
1244 method
= cls
->getCtor();
1246 assert(callType
== ObjMethod
|| callType
== ClsMethod
);
1247 assert(methodName
!= nullptr);
1248 method
= cls
->lookupMethod(methodName
);
1250 static StringData
* sd__construct
1251 = StringData::GetStaticString("__construct");
1252 if (UNLIKELY(methodName
== sd__construct
)) {
1253 // We were looking up __construct and failed to find it. Fall back
1254 // to old-style constructor: same as class name.
1255 method
= cls
->getCtor();
1256 if (!Func::isSpecial(method
->name())) break;
1259 raise_error("Call to undefined method %s::%s from %s%s",
1260 cls
->name()->data(),
1262 ctx
? "context " : "anonymous context",
1263 ctx
? ctx
->name()->data() : "");
1269 bool accessible
= true;
1270 // If we found a protected or private method, we need to do some
1271 // accessibility checks.
1272 if ((method
->attrs() & (AttrProtected
|AttrPrivate
)) &&
1273 !g_vmContext
->getDebuggerBypassCheck()) {
1274 Class
* baseClass
= method
->baseCls();
1276 // If the context class is the same as the class that first
1277 // declared this method, then we know we have the right method
1278 // and we can stop here.
1279 if (ctx
== baseClass
) {
1282 // The anonymous context cannot access protected or private methods,
1283 // so we can fail fast here.
1284 if (ctx
== nullptr) {
1286 raise_error("Call to %s method %s::%s from anonymous context",
1287 (method
->attrs() & AttrPrivate
) ? "private" : "protected",
1288 cls
->name()->data(),
1289 method
->name()->data());
1294 if (method
->attrs() & AttrPrivate
) {
1295 // The context class is not the same as the class that declared
1296 // this private method, so this private method is not accessible.
1297 // We need to keep going because the context class may define a
1298 // private method with this name.
1301 // If the context class is derived from the class that first
1302 // declared this protected method, then we know this method is
1303 // accessible and we know the context class cannot have a private
1304 // method with the same name, so we're done.
1305 if (ctx
->classof(baseClass
)) {
1308 if (!baseClass
->classof(ctx
)) {
1309 // The context class is not the same, an ancestor, or a descendent
1310 // of the class that first declared this protected method, so
1311 // this method is not accessible. Because the context class is
1312 // not the same or an ancestor of the class which first declared
1313 // the method, we know that the context class is not the same
1314 // or an ancestor of cls, and therefore we don't need to check
1315 // if the context class declares a private method with this name,
1316 // so we can fail fast here.
1318 raise_error("Call to protected method %s::%s from context %s",
1319 cls
->name()->data(),
1320 method
->name()->data(),
1321 ctx
->name()->data());
1325 // We now know this protected method is accessible, but we need to
1326 // keep going because the context class may define a private method
1328 assert(accessible
&& baseClass
->classof(ctx
));
1331 // If this is an ObjMethod call ("$obj->foo()") AND there is an ancestor
1332 // of cls that declares a private method with this name AND the context
1333 // class is an ancestor of cls, check if the context class declares a
1334 // private method with this name.
1335 if (method
->hasPrivateAncestor() && callType
== ObjMethod
&&
1336 ctx
&& cls
->classof(ctx
)) {
1337 const Func
* ctxMethod
= ctx
->lookupMethod(methodName
);
1338 if (ctxMethod
&& ctxMethod
->cls() == ctx
&&
1339 (ctxMethod
->attrs() & AttrPrivate
)) {
1340 // For ObjMethod calls a private method from the context class
1341 // trumps any other method we may have found.
1349 raise_error("Call to private method %s::%s from %s%s",
1350 method
->baseCls()->name()->data(),
1351 method
->name()->data(),
1352 ctx
? "context " : "anonymous context",
1353 ctx
? ctx
->name()->data() : "");
1358 LookupResult
VMExecutionContext::lookupObjMethod(const Func
*& f
,
1360 const StringData
* methodName
,
1361 bool raise
/* = false */) {
1362 Class
* ctx
= arGetContextClass(getFP());
1363 f
= lookupMethodCtx(cls
, methodName
, ctx
, ObjMethod
, false);
1365 f
= cls
->lookupMethod(s___call
.get());
1368 // Throw a fatal error
1369 lookupMethodCtx(cls
, methodName
, ctx
, ObjMethod
, true);
1371 return MethodNotFound
;
1373 return MagicCallFound
;
1375 if (f
->attrs() & AttrStatic
&& !f
->isClosureBody()) {
1376 return MethodFoundNoThis
;
1378 return MethodFoundWithThis
;
1382 VMExecutionContext::lookupClsMethod(const Func
*& f
,
1384 const StringData
* methodName
,
1386 bool raise
/* = false */) {
1387 Class
* ctx
= arGetContextClass(getFP());
1388 f
= lookupMethodCtx(cls
, methodName
, ctx
, ClsMethod
, false);
1390 if (obj
&& obj
->instanceof(cls
)) {
1391 f
= obj
->getVMClass()->lookupMethod(s___call
.get());
1394 f
= cls
->lookupMethod(s___callStatic
.get());
1397 // Throw a fatal errpr
1398 lookupMethodCtx(cls
, methodName
, ctx
, ClsMethod
, true);
1400 return MethodNotFound
;
1404 assert(f
->attrs() & AttrStatic
);
1405 return MagicCallStaticFound
;
1409 // __call cannot be static, this should be enforced by semantic
1410 // checks defClass time or earlier
1411 assert(!(f
->attrs() & AttrStatic
));
1412 return MagicCallFound
;
1414 if (obj
&& !(f
->attrs() & AttrStatic
) && obj
->instanceof(cls
)) {
1415 return MethodFoundWithThis
;
1417 return MethodFoundNoThis
;
1420 LookupResult
VMExecutionContext::lookupCtorMethod(const Func
*& f
,
1422 bool raise
/* = false */) {
1424 if (!(f
->attrs() & AttrPublic
)) {
1425 Class
* ctx
= arGetContextClass(getFP());
1426 f
= lookupMethodCtx(cls
, nullptr, ctx
, CtorMethod
, raise
);
1428 // If raise was true than lookupMethodCtx should have thrown,
1429 // so we should only be able to get here if raise was false
1431 return MethodNotFound
;
1434 return MethodFoundWithThis
;
1437 ObjectData
* VMExecutionContext::createObject(StringData
* clsName
,
1439 bool init
/* = true */) {
1440 Class
* class_
= Unit::loadClass(clsName
);
1441 if (class_
== nullptr) {
1442 throw_missing_class(clsName
->data());
1445 o
= newInstance(class_
);
1449 invokeFunc(&ret
, class_
->getCtor(), params
, o
.get());
1450 tvRefcountedDecRef(&ret
);
1453 ObjectData
* ret
= o
.detach();
1458 ObjectData
* VMExecutionContext::createObjectOnly(StringData
* clsName
) {
1459 return createObject(clsName
, null_array
, false);
1462 ActRec
* VMExecutionContext::getStackFrame() {
1467 ObjectData
* VMExecutionContext::getThis() {
1469 ActRec
* fp
= getFP();
1470 if (fp
->skipFrame()) {
1471 fp
= getPrevVMState(fp
);
1472 if (!fp
) return nullptr;
1474 if (fp
->hasThis()) {
1475 return fp
->getThis();
1480 Class
* VMExecutionContext::getContextClass() {
1482 ActRec
* ar
= getFP();
1483 assert(ar
!= nullptr);
1484 if (ar
->skipFrame()) {
1485 ar
= getPrevVMState(ar
);
1486 if (!ar
) return nullptr;
1488 return ar
->m_func
->cls();
1491 Class
* VMExecutionContext::getParentContextClass() {
1492 if (Class
* ctx
= getContextClass()) {
1493 return ctx
->parent();
1498 CStrRef
VMExecutionContext::getContainingFileName() {
1500 ActRec
* ar
= getFP();
1501 if (ar
== nullptr) return empty_string
;
1502 if (ar
->skipFrame()) {
1503 ar
= getPrevVMState(ar
);
1504 if (ar
== nullptr) return empty_string
;
1506 Unit
* unit
= ar
->m_func
->unit();
1507 return unit
->filepathRef();
1510 int VMExecutionContext::getLine() {
1512 ActRec
* ar
= getFP();
1513 Unit
* unit
= ar
? ar
->m_func
->unit() : nullptr;
1514 Offset pc
= unit
? pcOff() : 0;
1515 if (ar
== nullptr) return -1;
1516 if (ar
->skipFrame()) {
1517 ar
= getPrevVMState(ar
, &pc
);
1519 if (ar
== nullptr || (unit
= ar
->m_func
->unit()) == nullptr) return -1;
1520 return unit
->getLineNumber(pc
);
1523 Array
VMExecutionContext::getCallerInfo() {
1525 Array result
= Array::Create();
1526 ActRec
* ar
= getFP();
1527 if (ar
->skipFrame()) {
1528 ar
= getPrevVMState(ar
);
1530 while (ar
->m_func
->name()->isame(s_call_user_func
.get())
1531 || ar
->m_func
->name()->isame(s_call_user_func_array
.get())) {
1532 ar
= getPrevVMState(ar
);
1533 if (ar
== nullptr) {
1539 ar
= getPrevVMState(ar
, &pc
);
1540 while (ar
!= nullptr) {
1541 if (!ar
->m_func
->name()->isame(s_call_user_func
.get())
1542 && !ar
->m_func
->name()->isame(s_call_user_func_array
.get())) {
1543 Unit
* unit
= ar
->m_func
->unit();
1545 if ((lineNumber
= unit
->getLineNumber(pc
)) != -1) {
1546 assert(!unit
->filepath()->size() ||
1547 unit
->filepath()->data()[0] == '/');
1548 result
.set(s_file
, unit
->filepath()->data(), true);
1549 result
.set(s_line
, lineNumber
);
1553 ar
= getPrevVMState(ar
, &pc
);
1558 bool VMExecutionContext::renameFunction(const StringData
* oldName
,
1559 const StringData
* newName
) {
1560 return m_renamedFuncs
.rename(oldName
, newName
);
1563 bool VMExecutionContext::isFunctionRenameable(const StringData
* name
) {
1564 return m_renamedFuncs
.isFunctionRenameable(name
);
1567 void VMExecutionContext::addRenameableFunctions(ArrayData
* arr
) {
1568 m_renamedFuncs
.addRenameableFunctions(arr
);
1571 VarEnv
* VMExecutionContext::getVarEnv() {
1572 Transl::VMRegAnchor _
;
1574 VarEnv
* builtinVarEnv
= nullptr;
1575 ActRec
* fp
= getFP();
1576 if (UNLIKELY(!fp
)) return NULL
;
1577 if (fp
->skipFrame()) {
1578 if (fp
->hasVarEnv()) {
1579 builtinVarEnv
= fp
->getVarEnv();
1581 fp
= getPrevVMState(fp
);
1583 if (!fp
) return nullptr;
1584 assert(!fp
->hasInvName());
1585 if (!fp
->hasVarEnv()) {
1586 if (builtinVarEnv
) {
1587 // If the builtin function has its own VarEnv, we temporarily
1588 // remove it from the list before making a VarEnv for the calling
1589 // function to satisfy various asserts
1590 assert(builtinVarEnv
== m_topVarEnv
);
1591 m_topVarEnv
= m_topVarEnv
->previous();
1593 fp
->m_varEnv
= VarEnv::createLazyAttach(fp
);
1594 if (builtinVarEnv
) {
1595 // Put the builtin function's VarEnv back in the list
1596 builtinVarEnv
->setPrevious(fp
->m_varEnv
);
1597 m_topVarEnv
= builtinVarEnv
;
1600 return fp
->m_varEnv
;
1603 void VMExecutionContext::setVar(StringData
* name
, TypedValue
* v
, bool ref
) {
1604 Transl::VMRegAnchor _
;
1605 // setVar() should only be called after getVarEnv() has been called
1606 // to create a varEnv
1607 ActRec
*fp
= getFP();
1609 if (fp
->skipFrame()) {
1610 fp
= getPrevVMState(fp
);
1612 assert(!fp
->hasInvName());
1613 assert(!fp
->hasExtraArgs());
1614 assert(fp
->m_varEnv
!= nullptr);
1616 fp
->m_varEnv
->bind(name
, v
);
1618 fp
->m_varEnv
->set(name
, v
);
1622 Array
VMExecutionContext::getLocalDefinedVariables(int frame
) {
1623 Transl::VMRegAnchor _
;
1624 ActRec
*fp
= getFP();
1625 for (; frame
> 0; --frame
) {
1627 fp
= getPrevVMState(fp
);
1630 return Array::Create();
1632 assert(!fp
->hasInvName());
1633 if (fp
->hasVarEnv()) {
1634 return fp
->m_varEnv
->getDefinedVariables();
1636 Array ret
= Array::Create();
1637 const Func
*func
= fp
->m_func
;
1638 for (Id id
= 0; id
< func
->numNamedLocals(); ++id
) {
1639 TypedValue
* ptv
= frame_local(fp
, id
);
1640 if (ptv
->m_type
== KindOfUninit
) {
1643 Variant
name(func
->localVarName(id
)->data());
1644 ret
.add(name
, tvAsVariant(ptv
));
1649 void VMExecutionContext::shuffleMagicArgs(ActRec
* ar
) {
1650 // We need to put this where the first argument is
1651 StringData
* invName
= ar
->getInvName();
1652 int nargs
= ar
->numArgs();
1653 ar
->setVarEnv(nullptr);
1654 assert(!ar
->hasVarEnv() && !ar
->hasInvName());
1655 // We need to make an array containing all the arguments passed by the
1656 // caller and put it where the second argument is
1657 ArrayData
* argArray
= pack_args_into_array(ar
, nargs
);
1658 argArray
->incRefCount();
1659 // Remove the arguments from the stack
1660 for (int i
= 0; i
< nargs
; ++i
) {
1663 // Move invName to where the first argument belongs, no need
1664 // to incRef/decRef since we are transferring ownership
1665 m_stack
.pushStringNoRc(invName
);
1666 // Move argArray to where the second argument belongs. We've already
1667 // incReffed the array above so we don't need to do it here.
1668 m_stack
.pushArrayNoRc(argArray
);
1673 static inline void checkStack(Stack
& stk
, const Func
* f
) {
1674 ThreadInfo
* info
= ThreadInfo::s_threadInfo
.getNoCheck();
1675 // Check whether func's maximum stack usage would overflow the stack.
1676 // Both native and VM stack overflows are independently possible.
1677 if (!stack_in_bounds(info
) ||
1678 stk
.wouldOverflow(f
->maxStackCells() + kStackCheckPadding
)) {
1679 TRACE(1, "Maximum VM stack depth exceeded.\n");
1680 raise_error("Stack overflow");
1684 bool VMExecutionContext::prepareFuncEntry(ActRec
*ar
, PC
& pc
) {
1685 const Func
* func
= ar
->m_func
;
1686 Offset firstDVInitializer
= InvalidAbsoluteOffset
;
1687 bool raiseMissingArgumentWarnings
= false;
1688 int nparams
= func
->numParams();
1689 if (UNLIKELY(ar
->m_varEnv
!= nullptr)) {
1691 * m_varEnv != nullptr => we have a varEnv, extraArgs, or an invName.
1693 if (ar
->hasInvName()) {
1694 // shuffleMagicArgs deals with everything. no need for
1695 // further argument munging
1696 shuffleMagicArgs(ar
);
1697 } else if (ar
->hasVarEnv()) {
1699 if (!func
->isGenerator()) {
1700 assert(func
->isPseudoMain());
1701 pushLocalsAndIterators(func
);
1702 ar
->m_varEnv
->attach(ar
);
1704 pc
= func
->getEntry();
1705 // Nothing more to do; get out
1708 assert(ar
->hasExtraArgs());
1709 assert(func
->numParams() < ar
->numArgs());
1712 int nargs
= ar
->numArgs();
1713 if (nargs
!= nparams
) {
1714 if (nargs
< nparams
) {
1715 // Push uninitialized nulls for missing arguments. Some of them may end
1716 // up getting default-initialized, but regardless, we need to make space
1717 // for them on the stack.
1718 const Func::ParamInfoVec
& paramInfo
= func
->params();
1719 for (int i
= nargs
; i
< nparams
; ++i
) {
1720 m_stack
.pushUninit();
1721 Offset dvInitializer
= paramInfo
[i
].funcletOff();
1722 if (dvInitializer
== InvalidAbsoluteOffset
) {
1723 // We wait to raise warnings until after all the locals have been
1724 // initialized. This is important because things need to be in a
1725 // consistent state in case the user error handler throws.
1726 raiseMissingArgumentWarnings
= true;
1727 } else if (firstDVInitializer
== InvalidAbsoluteOffset
) {
1728 // This is the first unpassed arg with a default value, so
1729 // this is where we'll need to jump to.
1730 firstDVInitializer
= dvInitializer
;
1734 if (func
->attrs() & AttrMayUseVV
) {
1735 // Extra parameters must be moved off the stack.
1736 const int numExtras
= nargs
- nparams
;
1737 ar
->setExtraArgs(ExtraArgs::allocateCopy((TypedValue
*)ar
- nargs
,
1739 m_stack
.ndiscard(numExtras
);
1741 // The function we're calling is not marked as "MayUseVV",
1742 // so just discard the extra arguments
1743 int numExtras
= nargs
- nparams
;
1744 for (int i
= 0; i
< numExtras
; i
++) {
1747 ar
->setNumArgs(nparams
);
1753 int nlocals
= nparams
;
1754 if (UNLIKELY(func
->isClosureBody())) {
1755 int nuse
= init_closure(ar
, m_stack
.top());
1756 // init_closure doesn't move m_stack
1757 m_stack
.nalloc(nuse
);
1762 if (LIKELY(!func
->isGenerator())) {
1764 * we only get here from callAndResume
1765 * if we failed to get a translation for
1766 * a generator's prologue
1768 pushLocalsAndIterators(func
, nlocals
);
1772 if (firstDVInitializer
!= InvalidAbsoluteOffset
) {
1773 pc
= func
->unit()->entry() + firstDVInitializer
;
1775 pc
= func
->getEntry();
1777 // cppext functions/methods have their own logic for raising
1778 // warnings for missing arguments, so we only need to do this work
1779 // for non-cppext functions/methods
1780 if (raiseMissingArgumentWarnings
&& !func
->info()) {
1781 // need to sync m_pc to pc for backtraces/re-entry
1783 const Func::ParamInfoVec
& paramInfo
= func
->params();
1784 for (int i
= ar
->numArgs(); i
< nparams
; ++i
) {
1785 Offset dvInitializer
= paramInfo
[i
].funcletOff();
1786 if (dvInitializer
== InvalidAbsoluteOffset
) {
1787 const char* name
= func
->name()->data();
1789 raise_warning(Strings::MISSING_ARGUMENT
, name
, i
);
1791 raise_warning(Strings::MISSING_ARGUMENTS
, name
, nparams
, i
);
1799 void VMExecutionContext::syncGdbState() {
1800 if (RuntimeOption::EvalJit
&& !RuntimeOption::EvalJitNoGdb
) {
1801 tx64
->m_debugInfo
.debugSync();
1805 void VMExecutionContext::enterVMPrologue(ActRec
* enterFnAr
) {
1807 Stats::inc(Stats::VMEnter
);
1808 if (ThreadInfo::s_threadInfo
->m_reqInjectionData
.getJit()) {
1809 int np
= enterFnAr
->m_func
->numParams();
1810 int na
= enterFnAr
->numArgs();
1811 if (na
> np
) na
= np
+ 1;
1812 TCA start
= enterFnAr
->m_func
->getPrologue(na
);
1813 tx64
->enterTCAtProlog(enterFnAr
, start
);
1815 if (prepareFuncEntry(enterFnAr
, m_pc
)) {
1816 enterVMWork(enterFnAr
);
1821 void VMExecutionContext::enterVMWork(ActRec
* enterFnAr
) {
1822 TCA start
= nullptr;
1824 if (!EventHook::FunctionEnter(enterFnAr
, EventHook::NormalFunc
)) return;
1825 checkStack(m_stack
, enterFnAr
->m_func
);
1826 start
= enterFnAr
->m_func
->getFuncBody();
1828 Stats::inc(Stats::VMEnter
);
1829 if (ThreadInfo::s_threadInfo
->m_reqInjectionData
.getJit()) {
1830 (void) curUnit()->offsetOf(m_pc
); /* assert */
1833 tx64
->enterTCAfterProlog(start
);
1835 SrcKey
sk(curFunc(), m_pc
);
1836 tx64
->enterTCAtSrcKey(sk
);
1843 // Enumeration codes for the handling of VM exceptions.
1845 EXCEPTION_START
= 0,
1846 EXCEPTION_PROPAGATE
,
1851 static void pushFault(Fault::Type t
, Exception
* e
, const Object
* o
= nullptr) {
1852 FTRACE(1, "pushing new fault: {} {} {}\n",
1853 t
== Fault::UserException
? "[user exception]" : "[cpp exception]",
1856 VMExecutionContext
* ec
= g_vmContext
;
1858 fault
.m_faultType
= t
;
1859 if (t
== Fault::UserException
) {
1862 fault
.m_userException
= o
->get();
1863 fault
.m_userException
->incRefCount();
1865 fault
.m_cppException
= e
;
1867 ec
->m_faults
.push_back(fault
);
1870 static int exception_handler() {
1874 } catch (const Object
& e
) {
1875 pushFault(Fault::UserException
, nullptr, &e
);
1876 longJmpType
= g_vmContext
->hhvmPrepareThrow();
1877 } catch (VMSwitchModeException
&e
) {
1878 longJmpType
= g_vmContext
->switchMode(e
.unwindBuiltin());
1879 } catch (Exception
&e
) {
1880 pushFault(Fault::CppException
, e
.clone());
1881 longJmpType
= g_vmContext
->hhvmPrepareThrow();
1882 } catch (std::exception
& e
) {
1883 pushFault(Fault::CppException
,
1884 new Exception("unexpected %s: %s", typeid(e
).name(), e
.what()));
1885 longJmpType
= g_vmContext
->hhvmPrepareThrow();
1887 pushFault(Fault::CppException
,
1888 new Exception("unknown exception"));
1889 longJmpType
= g_vmContext
->hhvmPrepareThrow();
1894 void VMExecutionContext::enterVM(TypedValue
* retval
, ActRec
* ar
) {
1896 ar
->m_savedRip
= (uintptr_t)tx64
->getCallToExit();
1897 assert(isReturnHelper(ar
->m_savedRip
));
1899 DEBUG_ONLY
int faultDepth
= m_faults
.size();
1901 if (debug
) assert(m_faults
.size() == faultDepth
);
1905 * TODO(#1343044): some of the structure of this code dates back to
1906 * when it used to be setjmp/longjmp based. It is probable we could
1907 * simplify it a little more, and maybe combine some of the logic
1908 * with exception_handler().
1910 * When an exception is propagating, each nesting of the VM is
1911 * responsible for unwinding its portion of the execution stack, and
1912 * finding user handlers if it is a catchable exception.
1914 * This try/catch is where all this logic is centered. The actual
1915 * unwinding happens under hhvmPrepareThrow, which returns a new
1916 * "jumpCode" here to indicate what to do next. Either we'll enter
1917 * the VM loop again at a user error/fault handler, or propagate the
1918 * exception to a less-nested VM.
1920 int jumpCode
= EXCEPTION_START
;
1924 case EXCEPTION_START
:
1925 if (m_fp
&& !ar
->m_varEnv
) {
1926 enterVMPrologue(ar
);
1928 if (prepareFuncEntry(ar
, m_pc
)) {
1933 case EXCEPTION_PROPAGATE
:
1934 // Jump out of this try/catch before throwing.
1936 case EXCEPTION_DEBUGGER
:
1937 // Triggered by switchMode() to switch VM mode
1938 // do nothing but reenter the VM with same VM stack
1940 case EXCEPTION_RESUMEVM
:
1946 } catch (const VMPrepareUnwind
&) {
1947 // This is slightly different from VMPrepareThrow, because we need
1948 // to re-raise the exception as if it came from the same offset.
1949 Fault fault
= m_faults
.back();
1950 Offset faultPC
= fault
.m_savedRaiseOffset
;
1951 FTRACE(1, "unwind: restoring offset {}\n", faultPC
);
1952 assert(faultPC
!= kInvalidOffset
);
1953 fault
.m_savedRaiseOffset
= kInvalidOffset
;
1954 UnwindStatus unwindType
= m_stack
.unwindFrame(m_fp
, faultPC
, m_pc
, fault
);
1955 jumpCode
= handleUnwind(unwindType
);
1958 assert(tl_regState
== REGSTATE_CLEAN
);
1959 jumpCode
= exception_handler();
1960 assert(jumpCode
!= EXCEPTION_START
);
1964 *retval
= *m_stack
.topTV();
1969 assert(m_faults
.size() > 0);
1970 Fault fault
= m_faults
.back();
1971 m_faults
.pop_back();
1972 switch (fault
.m_faultType
) {
1973 case Fault::UserException
: {
1974 Object obj
= fault
.m_userException
;
1975 fault
.m_userException
->decRefCount();
1978 case Fault::CppException
:
1979 // throwException() will take care of deleting heap-allocated
1980 // exception object for us
1981 fault
.m_cppException
->throwException();
1989 void VMExecutionContext::reenterVM(TypedValue
* retval
,
1991 TypedValue
* savedSP
) {
1994 VMState savedVM
= { getPC(), getFP(), m_firstAR
, savedSP
};
1995 TRACE(3, "savedVM: %p %p %p %p\n", m_pc
, m_fp
, m_firstAR
, savedSP
);
1996 pushVMState(savedVM
, ar
);
1997 assert(m_nestedVMs
.size() >= 1);
1999 enterVM(retval
, ar
);
2005 TRACE(1, "Reentry: exit fp %p pc %p\n", m_fp
, m_pc
);
2008 int VMExecutionContext::switchMode(bool unwindBuiltin
) {
2009 if (unwindBuiltin
) {
2010 // from Jit calling a builtin, should unwind a frame, and push a
2011 // return value on stack
2012 tx64
->sync(); // just to set tl_regState
2013 unwindBuiltinFrame();
2016 return EXCEPTION_DEBUGGER
;
2019 void VMExecutionContext::invokeFunc(TypedValue
* retval
,
2022 ObjectData
* this_
/* = NULL */,
2023 Class
* cls
/* = NULL */,
2024 VarEnv
* varEnv
/* = NULL */,
2025 StringData
* invName
/* = NULL */,
2026 InvokeFlags flags
/* = InvokeNormal */) {
2029 // If this is a regular function, this_ and cls must be NULL
2030 assert(f
->preClass() || f
->isPseudoMain() || (!this_
&& !cls
));
2031 // If this is a method, either this_ or cls must be non-NULL
2032 assert(!f
->preClass() || (this_
|| cls
));
2033 // If this is a static method, this_ must be NULL
2034 assert(!(f
->attrs() & AttrStatic
&& !f
->isClosureBody()) ||
2036 // invName should only be non-NULL if we are calling __call or
2038 assert(!invName
|| f
->name()->isame(s___call
.get()) ||
2039 f
->name()->isame(s___callStatic
.get()));
2040 // If a variable environment is being inherited then params must be empty
2041 assert(!varEnv
|| params
.empty());
2045 bool isMagicCall
= (invName
!= nullptr);
2047 if (this_
!= nullptr) {
2048 this_
->incRefCount();
2050 Cell
* savedSP
= m_stack
.top();
2052 if (f
->numParams() > kStackCheckReenterPadding
- kNumActRecCells
) {
2053 checkStack(m_stack
, f
);
2056 if (flags
& InvokePseudoMain
) {
2057 assert(f
->isPseudoMain() && !params
.get());
2058 Unit
* toMerge
= f
->unit();
2060 if (toMerge
->isMergeOnly()) {
2061 *retval
= *toMerge
->getMainReturn();
2066 ActRec
* ar
= m_stack
.allocA();
2075 ar
->setThis(nullptr);
2080 ar
->initNumArgs(params
.size());
2082 ar
->setVarEnv(varEnv
);
2085 if (m_fp
== nullptr) {
2086 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
2087 f
->name()->data(), ar
);
2089 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
2090 f
->name()->data(), m_pc
, ar
,
2091 m_fp
->m_func
? m_fp
->m_func
->name()->data() : "unknownBuiltin", m_fp
);
2095 ArrayData
*arr
= params
.get();
2097 // Put the method name into the location of the first parameter. We
2098 // are transferring ownership, so no need to incRef/decRef here.
2099 m_stack
.pushStringNoRc(invName
);
2100 // Put array of arguments into the location of the second parameter
2101 m_stack
.pushArray(arr
);
2103 const int numParams
= f
->numParams();
2104 const int numExtraArgs
= arr
->size() - numParams
;
2105 ExtraArgs
* extraArgs
= nullptr;
2106 if (numExtraArgs
> 0 && (f
->attrs() & AttrMayUseVV
)) {
2107 extraArgs
= ExtraArgs::allocateUninit(numExtraArgs
);
2108 ar
->setExtraArgs(extraArgs
);
2111 for (ssize_t i
= arr
->iter_begin();
2112 i
!= ArrayData::invalid_index
;
2113 i
= arr
->iter_advance(i
), ++paramId
) {
2114 TypedValue
*from
= arr
->nvGetValueRef(i
);
2116 if (LIKELY(paramId
< numParams
)) {
2117 to
= m_stack
.allocTV();
2119 if (!(f
->attrs() & AttrMayUseVV
)) {
2120 // Discard extra arguments, since the function cannot
2121 // possibly use them.
2122 assert(extraArgs
== nullptr);
2123 ar
->setNumArgs(numParams
);
2126 assert(extraArgs
!= nullptr && numExtraArgs
> 0);
2127 // VarEnv expects the extra args to be in "reverse" order
2128 // (i.e. the last extra arg has the lowest address)
2129 to
= extraArgs
->getExtraArg(paramId
- numParams
);
2132 if (LIKELY(!f
->byRef(paramId
))) {
2133 if (to
->m_type
== KindOfRef
) {
2136 } else if (!(flags
& InvokeIgnoreByRefErrors
) &&
2137 (from
->m_type
!= KindOfRef
||
2138 from
->m_data
.pref
->_count
== 2)) {
2139 raise_warning("Parameter %d to %s() expected to be "
2140 "a reference, value given",
2141 paramId
+ 1, f
->fullName()->data());
2142 if (skipCufOnInvalidParams
) {
2144 int n
= paramId
>= numParams
? paramId
- numParams
+ 1 : 0;
2145 ExtraArgs::deallocate(extraArgs
, n
);
2146 ar
->m_varEnv
= nullptr;
2149 while (paramId
>= 0) {
2154 tvWriteNull(retval
);
2162 reenterVM(retval
, ar
, savedSP
);
2164 assert(m_nestedVMs
.size() == 0);
2165 enterVM(retval
, ar
);
2169 void VMExecutionContext::invokeFuncFew(TypedValue
* retval
,
2172 StringData
* invName
,
2173 int argc
, TypedValue
* argv
) {
2176 // If this is a regular function, this_ and cls must be NULL
2177 assert(f
->preClass() || !thisOrCls
);
2178 // If this is a method, either this_ or cls must be non-NULL
2179 assert(!f
->preClass() || thisOrCls
);
2180 // If this is a static method, this_ must be NULL
2181 assert(!(f
->attrs() & AttrStatic
&& !f
->isClosureBody()) ||
2182 !ActRec::decodeThis(thisOrCls
));
2183 // invName should only be non-NULL if we are calling __call or
2185 assert(!invName
|| f
->name()->isame(s___call
.get()) ||
2186 f
->name()->isame(s___callStatic
.get()));
2190 if (ObjectData
* thiz
= ActRec::decodeThis(thisOrCls
)) {
2191 thiz
->incRefCount();
2193 Cell
* savedSP
= m_stack
.top();
2194 if (argc
> kStackCheckReenterPadding
- kNumActRecCells
) {
2195 checkStack(m_stack
, f
);
2197 ActRec
* ar
= m_stack
.allocA();
2201 ar
->m_this
= (ObjectData
*)thisOrCls
;
2202 ar
->initNumArgs(argc
);
2203 if (UNLIKELY(invName
!= nullptr)) {
2204 ar
->setInvName(invName
);
2206 ar
->m_varEnv
= nullptr;
2210 if (m_fp
== nullptr) {
2211 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
2212 f
->name()->data(), ar
);
2214 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
2215 f
->name()->data(), m_pc
, ar
,
2216 m_fp
->m_func
? m_fp
->m_func
->name()->data() : "unknownBuiltin", m_fp
);
2220 for (int i
= 0; i
< argc
; i
++) {
2221 *m_stack
.allocTV() = *argv
++;
2225 reenterVM(retval
, ar
, savedSP
);
2227 assert(m_nestedVMs
.size() == 0);
2228 enterVM(retval
, ar
);
2232 void VMExecutionContext::invokeContFunc(const Func
* f
,
2234 TypedValue
* param
/* = NULL */) {
2240 this_
->incRefCount();
2242 Cell
* savedSP
= m_stack
.top();
2244 // no need to check stack due to ReenterPadding
2245 assert(kStackCheckReenterPadding
- kNumActRecCells
>= 1);
2247 ActRec
* ar
= m_stack
.allocA();
2251 ar
->initNumArgs(param
!= nullptr ? 1 : 0);
2253 ar
->setVarEnv(nullptr);
2255 if (param
!= nullptr) {
2256 tvDup(param
, m_stack
.allocTV());
2260 reenterVM(&retval
, ar
, savedSP
);
2261 // Codegen for generator functions guarantees that they will return null
2262 assert(IS_NULL_TYPE(retval
.m_type
));
2265 void VMExecutionContext::invokeUnit(TypedValue
* retval
, Unit
* unit
) {
2266 Func
* func
= unit
->getMain();
2267 invokeFunc(retval
, func
, null_array
, nullptr, nullptr,
2268 m_globalVarEnv
, nullptr, InvokePseudoMain
);
2271 void VMExecutionContext::unwindBuiltinFrame() {
2272 // Unwind the frame for a builtin. Currently only used for
2273 // hphpd_break and fb_enable_code_coverage
2274 assert(m_fp
->m_func
->info());
2275 assert(m_fp
->m_func
->name()->isame(s_hphpd_break
.get()) ||
2276 m_fp
->m_func
->name()->isame(s_fb_enable_code_coverage
.get()));
2277 // Free any values that may be on the eval stack
2278 TypedValue
*evalTop
= (TypedValue
*)getFP();
2279 while (m_stack
.topTV() < evalTop
) {
2282 // Free the locals and VarEnv if there is one
2283 frame_free_locals_inl(m_fp
, m_fp
->m_func
->numLocals());
2284 // Tear down the frame
2286 ActRec
* sfp
= getPrevVMState(m_fp
, &pc
);
2289 m_pc
= m_fp
->m_func
->unit()->at(pc
);
2290 m_stack
.discardAR();
2293 int VMExecutionContext::hhvmPrepareThrow() {
2294 Fault
& fault
= m_faults
.back();
2296 TRACE(2, "hhvmPrepareThrow: %p(\"%s\") {\n", m_fp
,
2297 m_fp
->m_func
->name()->data());
2298 UnwindStatus unwindType
;
2299 unwindType
= m_stack
.unwindFrame(m_fp
, pcOff(),
2301 return handleUnwind(unwindType
);
2305 * Given a pointer to a VM frame, returns the previous VM frame in the call
2306 * stack. This function will also pass back by reference the previous PC (if
2307 * prevPc is non-null) and the previous SP (if prevSp is non-null).
2309 * If there is no previous VM frame, this function returns NULL and does not
2310 * set prevPc and prevSp.
2312 ActRec
* VMExecutionContext::getPrevVMState(const ActRec
* fp
,
2313 Offset
* prevPc
/* = NULL */,
2314 TypedValue
** prevSp
/* = NULL */,
2315 bool* fromVMEntry
/* = NULL */) {
2316 if (fp
== nullptr) {
2319 ActRec
* prevFp
= fp
->arGetSfp();
2322 if (UNLIKELY(fp
->m_func
->isGenerator())) {
2323 *prevSp
= (TypedValue
*)prevFp
- prevFp
->m_func
->numSlotsInFrame();
2325 *prevSp
= (TypedValue
*)&fp
[1];
2328 if (prevPc
) *prevPc
= prevFp
->m_func
->base() + fp
->m_soff
;
2329 if (fromVMEntry
) *fromVMEntry
= false;
2332 // Linear search from end of m_nestedVMs. In practice, we're probably
2333 // looking for something recently pushed.
2334 int i
= m_nestedVMs
.size() - 1;
2335 for (; i
>= 0; --i
) {
2336 if (m_nestedVMs
[i
].m_entryFP
== fp
) break;
2338 if (i
== -1) return nullptr;
2339 const VMState
& vmstate
= m_nestedVMs
[i
].m_savedState
;
2340 prevFp
= vmstate
.fp
;
2342 assert(prevFp
->m_func
->unit());
2343 if (prevSp
) *prevSp
= vmstate
.sp
;
2344 if (prevPc
) *prevPc
= prevFp
->m_func
->unit()->offsetOf(vmstate
.pc
);
2345 if (fromVMEntry
) *fromVMEntry
= true;
2349 Array
VMExecutionContext::debugBacktrace(bool skip
/* = false */,
2350 bool withSelf
/* = false */,
2351 bool withThis
/* = false */,
2353 parserFrame
/* = NULL */) {
2354 Array bt
= Array::Create();
2356 // If there is a parser frame, put it at the beginning of
2361 .set(s_file
, parserFrame
->filename
, true)
2362 .set(s_line
, parserFrame
->lineNumber
, true)
2367 Transl::VMRegAnchor _
;
2369 // If there are no VM frames, we're done
2374 ActRec
* fp
= nullptr;
2377 // Get the fp and pc of the top frame (possibly skipping one frame)
2380 fp
= getPrevVMState(getFP(), &pc
);
2382 // We skipped over the only VM frame, we're done
2387 Unit
*unit
= getFP()->m_func
->unit();
2389 pc
= unit
->offsetOf(m_pc
);
2392 // Handle the top frame
2394 // Builtins don't have a file and line number
2395 if (!fp
->m_func
->isBuiltin()) {
2396 Unit
*unit
= fp
->m_func
->unit();
2398 const char* filename
= unit
->filepath()->data();
2399 if (fp
->m_func
->originalFilename()) {
2400 filename
= fp
->m_func
->originalFilename()->data();
2405 ArrayInit
frame(parserFrame
? 4 : 2);
2406 frame
.set(s_file
, filename
, true);
2407 frame
.set(s_line
, unit
->getLineNumber(off
), true);
2409 frame
.set(s_function
, s_include
, true);
2410 frame
.set(s_args
, Array::Create(parserFrame
->filename
), true);
2412 bt
.append(frame
.toVariant());
2418 // Handle the subsequent VM frames
2420 for (ActRec
* prevFp
= getPrevVMState(fp
, &prevPc
); fp
!= nullptr;
2421 fp
= prevFp
, pc
= prevPc
, prevFp
= getPrevVMState(fp
, &prevPc
)) {
2422 // do not capture frame for HPHP only functions
2423 if (fp
->m_func
->isNoInjection()) {
2429 auto const curUnit
= fp
->m_func
->unit();
2430 auto const curOp
= *reinterpret_cast<const Opcode
*>(curUnit
->at(pc
));
2431 auto const isReturning
= curOp
== OpRetC
|| curOp
== OpRetV
;
2433 // Builtins and generators don't have a file and line number
2434 if (prevFp
&& !prevFp
->m_func
->isBuiltin() && !fp
->m_func
->isGenerator()) {
2435 auto const prevUnit
= prevFp
->m_func
->unit();
2436 auto prevFile
= prevUnit
->filepath();
2437 if (prevFp
->m_func
->originalFilename()) {
2438 prevFile
= prevFp
->m_func
->originalFilename();
2441 frame
.set(s_file
, const_cast<StringData
*>(prevFile
), true);
2443 // In the normal method case, the "saved pc" for line number printing is
2444 // pointing at the cell conversion (Unbox/Pop) instruction, not the call
2445 // itself. For multi-line calls, this instruction is associated with the
2446 // subsequent line which results in an off-by-n. We're subtracting one
2447 // in order to look up the line associated with the FCall/FCallArray
2448 // instruction. Exception handling and the other opcodes (ex. BoxR)
2449 // already do the right thing. The emitter associates object access with
2450 // the subsequent expression and this would be difficult to modify.
2451 auto const opAtPrevPc
=
2452 *reinterpret_cast<const Opcode
*>(prevUnit
->at(prevPc
));
2453 Offset pcAdjust
= 0;
2454 if (opAtPrevPc
== OpPopR
|| opAtPrevPc
== OpUnboxR
) {
2458 prevFp
->m_func
->unit()->getLineNumber(prevPc
- pcAdjust
),
2462 // check for include
2463 String funcname
= const_cast<StringData
*>(fp
->m_func
->name());
2464 if (fp
->m_func
->isGenerator()) {
2465 // retrieve the original function name from the inner continuation
2466 TypedValue
* tv
= frame_local(fp
, 0);
2467 assert(tv
->m_type
== HPHP::KindOfObject
);
2468 funcname
= static_cast<c_Continuation
*>(
2469 tv
->m_data
.pobj
)->t_getorigfuncname();
2472 if (fp
->m_func
->isClosureBody()) {
2473 static StringData
* s_closure_label
=
2474 StringData::GetStaticString("{closure}");
2475 funcname
= s_closure_label
;
2478 // check for pseudomain
2479 if (funcname
->empty()) {
2480 if (!prevFp
) continue;
2481 funcname
= s_include
;
2484 frame
.set(s_function
, funcname
, true);
2486 if (!funcname
.same(s_include
)) {
2487 // Closures have an m_this but they aren't in object context
2488 Class
* ctx
= arGetContextClass(fp
);
2489 if (ctx
!= nullptr && !fp
->m_func
->isClosureBody()) {
2490 frame
.set(s_class
, ctx
->name()->data(), true);
2491 if (fp
->hasThis() && !isReturning
) {
2493 frame
.set(s_object
, Object(fp
->getThis()), true);
2495 frame
.set(s_type
, "->", true);
2497 frame
.set(s_type
, "::", true);
2502 Array args
= Array::Create();
2503 if (funcname
.same(s_include
)) {
2505 args
.append(const_cast<StringData
*>(curUnit
->filepath()));
2506 frame
.set(s_args
, args
, true);
2508 } else if (!RuntimeOption::EnableArgsInBacktraces
|| isReturning
) {
2509 // Provide an empty 'args' array to be consistent with hphpc
2510 frame
.set(s_args
, args
, true);
2512 int nparams
= fp
->m_func
->numParams();
2513 int nargs
= fp
->numArgs();
2514 /* builtin extra args are not stored in varenv */
2515 if (nargs
<= nparams
) {
2516 for (int i
= 0; i
< nargs
; i
++) {
2517 TypedValue
*arg
= frame_local(fp
, i
);
2518 args
.append(tvAsVariant(arg
));
2522 for (i
= 0; i
< nparams
; i
++) {
2523 TypedValue
*arg
= frame_local(fp
, i
);
2524 args
.append(tvAsVariant(arg
));
2526 for (; i
< nargs
; i
++) {
2527 TypedValue
*arg
= fp
->getExtraArg(i
- nparams
);
2528 args
.append(tvAsVariant(arg
));
2531 frame
.set(s_args
, args
, true);
2534 bt
.append(frame
.toVariant());
2540 MethodInfoVM::~MethodInfoVM() {
2541 for (std::vector
<const ClassInfo::ParameterInfo
*>::iterator it
=
2542 parameters
.begin(); it
!= parameters
.end(); ++it
) {
2543 if ((*it
)->value
!= nullptr) {
2544 free((void*)(*it
)->value
);
ClassInfoVM::~ClassInfoVM() {
  // Tear down the reflection metadata owned by this instance: the method
  // vector, then the property and constant maps' values.
  destroyMembers(m_methodsVec);
  destroyMapValues(m_properties);
  destroyMapValues(m_constants);
}
Array VMExecutionContext::getUserFunctionsInfo() {
  // Return an array of all user-defined function names. This method is used to
  // support get_defined_functions().
  return Unit::getUserFunctions();
}
Array VMExecutionContext::getConstantsInfo() {
  // Return an array of all defined constant:value pairs. This method is used
  // to support get_defined_constants().
  // NOTE(review): as written this returns an empty array — the enumeration
  // is not implemented here.
  return Array::Create();
}
2567 const ClassInfo::MethodInfo
* VMExecutionContext::findFunctionInfo(
2569 StringIMap
<AtomicSmartPtr
<MethodInfoVM
> >::iterator it
=
2570 m_functionInfos
.find(name
);
2571 if (it
== m_functionInfos
.end()) {
2572 Func
* func
= Unit::loadFunc(name
.get());
2573 if (func
== nullptr || func
->builtinFuncPtr()) {
2576 AtomicSmartPtr
<MethodInfoVM
> &m
= m_functionInfos
[name
];
2577 m
= new MethodInfoVM();
2578 func
->getFuncInfo(m
.get());
2581 return it
->second
.get();
2585 const ClassInfo
* VMExecutionContext::findClassInfo(CStrRef name
) {
2586 if (name
->empty()) return nullptr;
2587 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2588 m_classInfos
.find(name
);
2589 if (it
== m_classInfos
.end()) {
2590 Class
* cls
= Unit::lookupClass(name
.get());
2591 if (cls
== nullptr) return nullptr;
2592 if (cls
->clsInfo()) return cls
->clsInfo();
2593 if (cls
->attrs() & (AttrInterface
| AttrTrait
)) {
2594 // If the specified name matches with something that is not formally
2595 // a class, return NULL
2598 AtomicSmartPtr
<ClassInfoVM
> &c
= m_classInfos
[name
];
2599 c
= new ClassInfoVM();
2600 cls
->getClassInfo(c
.get());
2603 return it
->second
.get();
2607 const ClassInfo
* VMExecutionContext::findInterfaceInfo(CStrRef name
) {
2608 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2609 m_interfaceInfos
.find(name
);
2610 if (it
== m_interfaceInfos
.end()) {
2611 Class
* cls
= Unit::lookupClass(name
.get());
2612 if (cls
== nullptr) return nullptr;
2613 if (cls
->clsInfo()) return cls
->clsInfo();
2614 if (!(cls
->attrs() & AttrInterface
)) {
2615 // If the specified name matches with something that is not formally
2616 // an interface, return NULL
2619 AtomicSmartPtr
<ClassInfoVM
> &c
= m_interfaceInfos
[name
];
2620 c
= new ClassInfoVM();
2621 cls
->getClassInfo(c
.get());
2624 return it
->second
.get();
2628 const ClassInfo
* VMExecutionContext::findTraitInfo(CStrRef name
) {
2629 StringIMap
<AtomicSmartPtr
<ClassInfoVM
> >::iterator it
=
2630 m_traitInfos
.find(name
);
2631 if (it
!= m_traitInfos
.end()) {
2632 return it
->second
.get();
2634 Class
* cls
= Unit::lookupClass(name
.get());
2635 if (cls
== nullptr) return nullptr;
2636 if (cls
->clsInfo()) return cls
->clsInfo();
2637 if (!(cls
->attrs() & AttrTrait
)) {
2640 AtomicSmartPtr
<ClassInfoVM
> &classInfo
= m_traitInfos
[name
];
2641 classInfo
= new ClassInfoVM();
2642 cls
->getClassInfo(classInfo
.get());
2643 return classInfo
.get();
2646 const ClassInfo::ConstantInfo
* VMExecutionContext::findConstantInfo(
2648 TypedValue
* tv
= Unit::lookupCns(name
.get());
2649 if (tv
== nullptr) {
2652 ConstInfoMap::const_iterator it
= m_constInfo
.find(name
.get());
2653 if (it
!= m_constInfo
.end()) {
2656 StringData
* key
= StringData::GetStaticString(name
.get());
2657 ClassInfo::ConstantInfo
* ci
= new ClassInfo::ConstantInfo();
2658 ci
->name
= *(const String
*)&key
;
2661 ci
->setValue(tvAsCVarRef(tv
));
2662 m_constInfo
[key
] = ci
;
2666 HPHP::Eval::PhpFile
* VMExecutionContext::lookupPhpFile(StringData
* path
,
2667 const char* currentDir
,
2668 bool* initial_opt
) {
2670 bool &initial
= initial_opt
? *initial_opt
: init
;
2674 String spath
= Eval::resolveVmInclude(path
, currentDir
, &s
);
2675 if (spath
.isNull()) return nullptr;
2677 // Check if this file has already been included.
2678 EvaledFilesMap::const_iterator it
= m_evaledFiles
.find(spath
.get());
2679 HPHP::Eval::PhpFile
* efile
= nullptr;
2680 if (it
!= m_evaledFiles
.end()) {
2681 // We found it! Return the unit.
2684 if (!initial_opt
) efile
->incRef();
2687 // We didn't find it, so try the realpath.
2688 bool alreadyResolved
=
2689 RuntimeOption::RepoAuthoritative
||
2690 (!RuntimeOption::CheckSymLink
&& (spath
[0] == '/'));
2691 bool hasRealpath
= false;
2693 if (!alreadyResolved
) {
2694 std::string rp
= StatCache::realpath(spath
.data());
2695 if (rp
.size() != 0) {
2696 rpath
= NEW(StringData
)(rp
.data(), rp
.size(), CopyString
);
2697 if (!rpath
.same(spath
)) {
2699 it
= m_evaledFiles
.find(rpath
.get());
2700 if (it
!= m_evaledFiles
.end()) {
2701 // We found it! Update the mapping for spath and
2704 m_evaledFiles
[spath
.get()] = efile
;
2705 spath
.get()->incRefCount();
2708 if (!initial_opt
) efile
->incRef();
2714 // This file hasn't been included yet, so we need to parse the file
2715 efile
= HPHP::Eval::FileRepository::checkoutFile(
2716 hasRealpath
? rpath
.get() : spath
.get(), s
);
2717 assert(!efile
|| efile
->getRef() > 0);
2718 if (efile
&& initial_opt
) {
2719 // if initial_opt is not set, this shouldnt be recorded as a
2720 // per request fetch of the file.
2721 if (Transl::TargetCache::testAndSetBit(efile
->getId())) {
2724 // if parsing was successful, update the mappings for spath and
2725 // rpath (if it exists).
2726 m_evaledFiles
[spath
.get()] = efile
;
2727 spath
.get()->incRefCount();
2728 // Don't incRef efile; checkoutFile() already counted it.
2730 m_evaledFiles
[rpath
.get()] = efile
;
2731 rpath
.get()->incRefCount();
2734 DEBUGGER_ATTACHED_ONLY(phpDebuggerFileLoadHook(efile
));
2739 Unit
* VMExecutionContext::evalInclude(StringData
* path
,
2740 const StringData
* curUnitFilePath
,
2742 namespace fs
= boost::filesystem
;
2743 HPHP::Eval::PhpFile
* efile
= nullptr;
2744 if (curUnitFilePath
) {
2745 fs::path
currentUnit(curUnitFilePath
->data());
2746 fs::path
currentDir(currentUnit
.branch_path());
2747 efile
= lookupPhpFile(path
, currentDir
.string().c_str(), initial
);
2749 efile
= lookupPhpFile(path
, "", initial
);
2752 return efile
->unit();
2757 HPHP::Unit
* VMExecutionContext::evalIncludeRoot(
2758 StringData
* path
, InclOpFlags flags
, bool* initial
) {
2759 HPHP::Eval::PhpFile
* efile
= lookupIncludeRoot(path
, flags
, initial
);
2760 return efile
? efile
->unit() : 0;
2763 HPHP::Eval::PhpFile
* VMExecutionContext::lookupIncludeRoot(StringData
* path
,
2768 if ((flags
& InclOpRelative
)) {
2769 namespace fs
= boost::filesystem
;
2770 if (!unit
) unit
= getFP()->m_func
->unit();
2771 fs::path
currentUnit(unit
->filepath()->data());
2772 fs::path
currentDir(currentUnit
.branch_path());
2773 absPath
= currentDir
.string() + '/';
2774 TRACE(2, "lookupIncludeRoot(%s): relative -> %s\n",
2778 assert(flags
& InclOpDocRoot
);
2779 absPath
= SourceRootInfo::GetCurrentPhpRoot();
2780 TRACE(2, "lookupIncludeRoot(%s): docRoot -> %s\n",
2785 absPath
+= StrNR(path
);
2787 EvaledFilesMap::const_iterator it
= m_evaledFiles
.find(absPath
.get());
2788 if (it
!= m_evaledFiles
.end()) {
2789 if (initial
) *initial
= false;
2790 if (!initial
) it
->second
->incRef();
2794 return lookupPhpFile(absPath
.get(), "", initial
);
2798 Instantiate hoistable classes and functions.
2799 If there is any more work left to do, setup a
2800 new frame ready to execute the pseudomain.
2802 return true iff the pseudomain needs to be executed.
2804 bool VMExecutionContext::evalUnit(Unit
* unit
, bool local
,
2805 PC
& pc
, int funcType
) {
2808 if (unit
->isMergeOnly()) {
2809 Stats::inc(Stats::PseudoMain_Skipped
);
2810 *m_stack
.allocTV() = *unit
->getMainReturn();
2813 Stats::inc(Stats::PseudoMain_Executed
);
2816 ActRec
* ar
= m_stack
.allocA();
2817 assert((uintptr_t)&ar
->m_func
< (uintptr_t)&ar
->m_r
);
2818 Class
* cls
= curClass();
2821 ar
->setThis(nullptr);
2822 } else if (m_fp
->hasThis()) {
2823 ObjectData
*this_
= m_fp
->getThis();
2824 this_
->incRefCount();
2826 } else if (m_fp
->hasClass()) {
2827 ar
->setClass(m_fp
->getClass());
2829 ar
->setThis(nullptr);
2831 Func
* func
= unit
->getMain(cls
);
2832 assert(!func
->info());
2833 assert(!func
->isGenerator());
2837 assert(!m_fp
->hasInvName());
2839 ar
->m_soff
= uintptr_t(m_fp
->m_func
->unit()->offsetOf(pc
) -
2840 m_fp
->m_func
->base());
2841 ar
->m_savedRip
= (uintptr_t)tx64
->getRetFromInterpretedFrame();
2842 assert(isReturnHelper(ar
->m_savedRip
));
2843 pushLocalsAndIterators(func
);
2847 if (!m_fp
->hasVarEnv()) {
2848 m_fp
->m_varEnv
= VarEnv::createLazyAttach(m_fp
);
2850 ar
->m_varEnv
= m_fp
->m_varEnv
;
2851 ar
->m_varEnv
->attach(ar
);
2854 pc
= func
->getEntry();
2856 bool ret
= EventHook::FunctionEnter(m_fp
, funcType
);
2861 CVarRef
VMExecutionContext::getEvaledArg(const StringData
* val
) {
2862 CStrRef key
= *(String
*)&val
;
2864 if (m_evaledArgs
.get()) {
2865 CVarRef arg
= m_evaledArgs
.get()->get(key
);
2866 if (&arg
!= &null_variant
) return arg
;
2868 String code
= HPHP::concat3("<?php return ", key
, ";");
2869 Unit
* unit
= compileEvalString(code
.get());
2870 assert(unit
!= nullptr);
2872 // Default arg values are not currently allowed to depend on class context.
2873 g_vmContext
->invokeFunc((TypedValue
*)&v
, unit
->getMain(),
2874 null_array
, nullptr, nullptr, nullptr, nullptr,
2876 Variant
&lv
= m_evaledArgs
.lvalAt(key
, AccessFlags::Key
);
2882 * Helper for function entry, including pseudo-main entry.
2885 VMExecutionContext::pushLocalsAndIterators(const Func
* func
,
2886 int nparams
/*= 0*/) {
2888 for (int i
= nparams
; i
< func
->numLocals(); i
++) {
2889 m_stack
.pushUninit();
2892 for (int i
= 0; i
< func
->numIterators(); i
++) {
void VMExecutionContext::enqueueSharedVar(SharedVariant* svar) {
  // Defer destruction of this shared variant: it is queued on a list that is
  // later handed to the treadmill (see treadmillSharedVars()).
  m_freedSvars.push_back(svar);
}
2901 class FreedSVars
: public Treadmill::WorkItem
{
2904 explicit FreedSVars(SVarVector
&& svars
) : m_svars(std::move(svars
)) {}
2905 virtual void operator()() {
2906 for (auto it
= m_svars
.begin(); it
!= m_svars
.end(); it
++) {
void VMExecutionContext::treadmillSharedVars() {
  // Hand the queued shared variants to the treadmill; the move leaves the
  // local list empty, and the work item disposes of them later.
  Treadmill::WorkItem::enqueue(new FreedSVars(std::move(m_freedSvars)));
}
2916 void VMExecutionContext::destructObjects() {
2917 if (UNLIKELY(RuntimeOption::EnableObjDestructCall
)) {
2918 while (!m_liveBCObjs
.empty()) {
2919 ObjectData
* o
= *m_liveBCObjs
.begin();
2920 Instance
* instance
= static_cast<Instance
*>(o
);
2921 instance
->destruct(); // Let the instance remove the node.
2923 m_liveBCObjs
.clear();
2927 // Evaled units have a footprint in the TC and translation metadata. The
2928 // applications we care about tend to have few, short, stereotyped evals,
2929 // where the same code keeps getting eval'ed over and over again; so we
2930 // keep around units for each eval'ed string, so that the TC space isn't
2931 // wasted on each eval.
2932 typedef RankedCHM
<StringData
*, HPHP::Unit
*,
2933 StringDataHashCompare
,
2934 RankEvaledUnits
> EvaledUnitsMap
;
2935 static EvaledUnitsMap s_evaledUnits
;
2936 Unit
* VMExecutionContext::compileEvalString(StringData
* code
) {
2937 EvaledUnitsMap::accessor acc
;
2938 // Promote this to a static string; otherwise it may get swept
2940 code
= StringData::GetStaticString(code
);
2941 if (s_evaledUnits
.insert(acc
, code
)) {
2942 acc
->second
= compile_string(code
->data(), code
->size());
2947 CStrRef
VMExecutionContext::createFunction(CStrRef args
, CStrRef code
) {
2949 // It doesn't matter if there's a user function named __lambda_func; we only
2950 // use this name during parsing, and then change it to an impossible name
2951 // with a NUL byte before we merge it into the request's func map. This also
2952 // has the bonus feature that the value of __FUNCTION__ inside the created
2953 // function will match Zend. (Note: Zend will actually fatal if there's a
2954 // user function named __lambda_func when you call create_function. Huzzah!)
2955 static StringData
* oldName
= StringData::GetStaticString("__lambda_func");
2956 std::ostringstream codeStr
;
2957 codeStr
<< "<?php function " << oldName
->data()
2958 << "(" << args
.data() << ") {"
2959 << code
.data() << "}\n";
2960 StringData
* evalCode
= StringData::GetStaticString(codeStr
.str());
2961 Unit
* unit
= compile_string(evalCode
->data(), evalCode
->size());
2962 // Move the function to a different name.
2963 std::ostringstream newNameStr
;
2964 newNameStr
<< '\0' << "lambda_" << ++m_lambdaCounter
;
2965 StringData
* newName
= StringData::GetStaticString(newNameStr
.str());
2966 unit
->renameFunc(oldName
, newName
);
2967 m_createdFuncs
.push_back(unit
);
2970 // Technically we shouldn't have to eval the unit right now (it'll execute
2971 // the pseudo-main, which should be empty) and could get away with just
2972 // mergeFuncs. However, Zend does it this way, as proven by the fact that you
2973 // can inject code into the evaled unit's pseudo-main:
2975 // create_function('', '} echo "hi"; if (0) {');
2977 // We have to eval now to emulate this behavior.
2979 invokeFunc(&retval
, unit
->getMain(), null_array
,
2980 nullptr, nullptr, nullptr, nullptr,
2983 // __lambda_func will be the only hoistable function.
2984 // Any functions or closures defined in it will not be hoistable.
2985 Func
* lambda
= unit
->firstHoistable();
2986 return lambda
->nameRef();
2989 void VMExecutionContext::evalPHPDebugger(TypedValue
* retval
, StringData
*code
,
2992 // The code has "<?php" prepended already
2993 Unit
* unit
= compileEvalString(code
);
2994 if (unit
== nullptr) {
2995 raise_error("Syntax error");
2996 tvWriteNull(retval
);
3000 VarEnv
*varEnv
= nullptr;
3001 ActRec
*fp
= getFP();
3002 ActRec
*cfpSave
= nullptr;
3004 VarEnv
* vit
= nullptr;
3005 for (; frame
> 0; --frame
) {
3006 if (fp
->hasVarEnv()) {
3009 } else if (vit
!= fp
->m_varEnv
) {
3010 vit
= vit
->previous();
3012 assert(vit
== fp
->m_varEnv
);
3014 ActRec
* prevFp
= getPrevVMState(fp
);
3016 // To be safe in case we failed to get prevFp. This would mean we've
3017 // been asked to eval in a frame which is beyond the top of the stack.
3018 // This suggests the debugger client has made an error.
3023 if (!fp
->hasVarEnv()) {
3025 fp
->m_varEnv
= VarEnv::createLazyAttach(fp
);
3027 const bool skipInsert
= true;
3028 fp
->m_varEnv
= VarEnv::createLazyAttach(fp
, skipInsert
);
3029 // Slide it in front of the VarEnv most recently above it.
3030 fp
->m_varEnv
->setPrevious(vit
->previous());
3031 vit
->setPrevious(fp
->m_varEnv
);
3034 varEnv
= fp
->m_varEnv
;
3035 cfpSave
= varEnv
->getCfp();
3037 ObjectData
*this_
= nullptr;
3038 // NB: the ActRec and function within the AR may have different classes. The
3039 // class in the ActRec is the type used when invoking the function (i.e.,
3040 // Derived in Derived::Foo()) while the class obtained from the function is
3041 // the type that declared the function Foo, which may be Base. We need both
3042 // the class to match any object that this function may have been invoked on,
3043 // and we need the class from the function execution is stopped in.
3044 Class
*frameClass
= nullptr;
3045 Class
*functionClass
= nullptr;
3047 if (fp
->hasThis()) {
3048 this_
= fp
->getThis();
3049 } else if (fp
->hasClass()) {
3050 frameClass
= fp
->getClass();
3052 functionClass
= fp
->m_func
->cls();
3053 phpDebuggerEvalHook(fp
->m_func
);
3056 const static StaticString
s_cppException("Hit an exception");
3057 const static StaticString
s_phpException("Hit a php exception");
3058 const static StaticString
s_exit("Hit exit");
3059 const static StaticString
s_fatal("Hit fatal");
3061 // Invoke the given PHP, possibly specialized to match the type of the
3062 // current function on the stack, optionally passing a this pointer or
3063 // class used to execute the current function.
3064 invokeFunc(retval
, unit
->getMain(functionClass
), null_array
,
3065 this_
, frameClass
, varEnv
, nullptr, InvokePseudoMain
);
3066 } catch (FatalErrorException
&e
) {
3067 g_vmContext
->write(s_fatal
);
3068 g_vmContext
->write(" : ");
3069 g_vmContext
->write(e
.getMessage().c_str());
3070 g_vmContext
->write("\n");
3071 g_vmContext
->write(ExtendedLogger::StringOfStackTrace(e
.getBackTrace()));
3072 } catch (ExitException
&e
) {
3073 g_vmContext
->write(s_exit
.data());
3074 g_vmContext
->write(" : ");
3075 std::ostringstream os
;
3076 os
<< ExitException::ExitCode
;
3077 g_vmContext
->write(os
.str());
3078 } catch (Eval::DebuggerException
&e
) {
3080 varEnv
->setCfp(cfpSave
);
3083 } catch (Exception
&e
) {
3084 g_vmContext
->write(s_cppException
.data());
3085 g_vmContext
->write(" : ");
3086 g_vmContext
->write(e
.getMessage().c_str());
3087 ExtendedException
* ee
= dynamic_cast<ExtendedException
*>(&e
);
3089 g_vmContext
->write("\n");
3091 ExtendedLogger::StringOfStackTrace(ee
->getBackTrace()));
3093 } catch (Object
&e
) {
3094 g_vmContext
->write(s_phpException
.data());
3095 g_vmContext
->write(" : ");
3096 g_vmContext
->write(e
->t___tostring().data());
3098 g_vmContext
->write(s_cppException
.data());
3102 // The debugger eval frame may have attached to the VarEnv from a
3103 // frame that was not the top frame, so we need to manually set
3104 // cfp back to what it was before
3105 varEnv
->setCfp(cfpSave
);
3109 void VMExecutionContext::enterDebuggerDummyEnv() {
3110 static Unit
* s_debuggerDummy
= nullptr;
3111 if (!s_debuggerDummy
) {
3112 s_debuggerDummy
= compile_string("<?php?>", 7);
3114 VarEnv
* varEnv
= m_topVarEnv
;
3116 assert(m_stack
.count() == 0);
3117 ActRec
* ar
= m_stack
.allocA();
3118 ar
->m_func
= s_debuggerDummy
->getMain();
3119 ar
->setThis(nullptr);
3122 ar
->m_savedRip
= (uintptr_t)tx64
->getCallToExit();
3123 assert(isReturnHelper(ar
->m_savedRip
));
3125 m_pc
= s_debuggerDummy
->entry();
3128 m_fp
->setVarEnv(varEnv
);
3129 varEnv
->attach(m_fp
);
void VMExecutionContext::exitDebuggerDummyEnv() {
  // Undo enterDebuggerDummyEnv(): detach the global VarEnv from the dummy
  // frame. Requires that the global VarEnv is currently the top VarEnv.
  assert(m_topVarEnv);
  assert(m_globalVarEnv == m_topVarEnv);
  m_globalVarEnv->detach(getFP());
}
3138 // Identifies the set of return helpers that we may set m_savedRip to in an
3140 bool VMExecutionContext::isReturnHelper(uintptr_t address
) {
3141 return ((address
== (uintptr_t)tx64
->getRetFromInterpretedFrame()) ||
3142 (address
== (uintptr_t)tx64
->getRetFromInterpretedGeneratorFrame()) ||
3143 (address
== (uintptr_t)tx64
->getCallToExit()));
3146 // Walk the stack and find any return address to jitted code and bash it to
3147 // the appropriate RetFromInterpreted*Frame helper. This ensures that we don't
3148 // return into jitted code and gives the system the proper chance to interpret
3149 // blacklisted tracelets.
3150 void VMExecutionContext::preventReturnsToTC() {
3151 assert(isDebuggerAttached());
3152 if (RuntimeOption::EvalJit
) {
3153 ActRec
*ar
= getFP();
3155 if (!isReturnHelper(ar
->m_savedRip
) &&
3156 (tx64
->isValidCodeAddress((TCA
)ar
->m_savedRip
))) {
3157 TRACE_RB(2, "Replace RIP in fp %p, savedRip 0x%lx, "
3158 "func %s\n", ar
, ar
->m_savedRip
,
3159 ar
->m_func
->fullName()->data());
3160 if (ar
->m_func
->isGenerator()) {
3162 (uintptr_t)tx64
->getRetFromInterpretedGeneratorFrame();
3165 (uintptr_t)tx64
->getRetFromInterpretedFrame();
3167 assert(isReturnHelper(ar
->m_savedRip
));
3169 ar
= getPrevVMState(ar
);
// Normalize a TypedValue used as a variable-name key into a StringData*,
// delegating to prepareKey().
static inline StringData* lookup_name(TypedValue* key) {
  return prepareKey(key);
}
3178 static inline void lookup_var(ActRec
* fp
,
3182 name
= lookup_name(key
);
3183 const Func
* func
= fp
->m_func
;
3184 Id id
= func
->lookupVarId(name
);
3185 if (id
!= kInvalidId
) {
3186 val
= frame_local(fp
, id
);
3188 assert(!fp
->hasInvName());
3189 if (fp
->hasVarEnv()) {
3190 val
= fp
->m_varEnv
->lookup(name
);
3197 static inline void lookupd_var(ActRec
* fp
,
3201 name
= lookup_name(key
);
3202 const Func
* func
= fp
->m_func
;
3203 Id id
= func
->lookupVarId(name
);
3204 if (id
!= kInvalidId
) {
3205 val
= frame_local(fp
, id
);
3207 assert(!fp
->hasInvName());
3208 if (!fp
->hasVarEnv()) {
3209 fp
->m_varEnv
= VarEnv::createLazyAttach(fp
);
3211 val
= fp
->m_varEnv
->lookup(name
);
3212 if (val
== nullptr) {
3215 fp
->m_varEnv
->set(name
, &tv
);
3216 val
= fp
->m_varEnv
->lookup(name
);
3221 static inline void lookup_gbl(ActRec
* fp
,
3225 name
= lookup_name(key
);
3226 assert(g_vmContext
->m_globalVarEnv
);
3227 val
= g_vmContext
->m_globalVarEnv
->lookup(name
);
3230 static inline void lookupd_gbl(ActRec
* fp
,
3234 name
= lookup_name(key
);
3235 assert(g_vmContext
->m_globalVarEnv
);
3236 VarEnv
* varEnv
= g_vmContext
->m_globalVarEnv
;
3237 val
= varEnv
->lookup(name
);
3238 if (val
== nullptr) {
3241 varEnv
->set(name
, &tv
);
3242 val
= varEnv
->lookup(name
);
3246 static inline void lookup_sprop(ActRec
* fp
,
3253 assert(clsRef
->m_type
== KindOfClass
);
3254 name
= lookup_name(key
);
3255 Class
* ctx
= arGetContextClass(fp
);
3256 val
= clsRef
->m_data
.pcls
->getSProp(ctx
, name
, visible
, accessible
);
3259 static inline void lookupClsRef(TypedValue
* input
,
3261 bool decRef
= false) {
3262 const Class
* class_
= nullptr;
3263 if (IS_STRING_TYPE(input
->m_type
)) {
3264 class_
= Unit::loadClass(input
->m_data
.pstr
);
3265 if (class_
== nullptr) {
3266 output
->m_type
= KindOfNull
;
3267 raise_error(Strings::UNKNOWN_CLASS
, input
->m_data
.pstr
->data());
3269 } else if (input
->m_type
== KindOfObject
) {
3270 class_
= input
->m_data
.pobj
->getVMClass();
3272 output
->m_type
= KindOfNull
;
3273 raise_error("Cls: Expected string or object");
3276 tvRefcountedDecRef(input
);
3278 output
->m_data
.pcls
= const_cast<Class
*>(class_
);
3279 output
->m_type
= KindOfClass
;
// Debug/trace helper: the refcount of a refcounted value, or -1 for
// non-refcounted types.
static UNUSED int innerCount(const TypedValue* tv) {
  if (IS_REFCOUNTED_TYPE(tv->m_type)) {
    // We're using pref here arbitrarily; any refcounted union member works.
    return tv->m_data.pref->_count;
  }
  return -1;
}
3290 static inline void ratchetRefs(TypedValue
*& result
, TypedValue
& tvRef
,
3291 TypedValue
& tvRef2
) {
3292 TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
3293 result
, result
->m_type
, innerCount(result
),
3294 &tvRef
, tvRef
.m_type
, innerCount(&tvRef
),
3295 &tvRef2
, tvRef2
.m_type
, innerCount(&tvRef2
));
3296 // Due to complications associated with ArrayAccess, it is possible to acquire
3297 // a reference as a side effect of vector operation processing. Such a
3298 // reference must be retained until after the next iteration is complete.
3299 // Therefore, move the reference from tvRef to tvRef2, so that the reference
3300 // will be released one iteration later. But only do this if tvRef was used in
3301 // this iteration, otherwise we may wipe out the last reference to something
3302 // that we need to stay alive until the next iteration.
3303 if (tvRef
.m_type
!= KindOfUninit
) {
3304 if (IS_REFCOUNTED_TYPE(tvRef2
.m_type
)) {
3306 TRACE(5, "Ratchet: decref tvref2\n");
3307 tvWriteUninit(&tvRef2
);
3310 memcpy(&tvRef2
, &tvRef
, sizeof(TypedValue
));
3311 tvWriteUninit(&tvRef
);
3312 // Update result to point to relocated reference. This can be done
3313 // unconditionally here because we maintain the invariant throughout that
3314 // either tvRef is KindOfUninit, or tvRef contains a valid object that
3315 // result points to.
3316 assert(result
== &tvRef
);
3321 #define DECLARE_MEMBERHELPER_ARGS \
3322 unsigned ndiscard; \
3324 TypedValue tvScratch; \
3325 TypedValue tvLiteral; \
3327 TypedValue tvRef2; \
3328 MemberCode mcode = MEL; \
3329 TypedValue* curMember = 0;
3330 #define DECLARE_SETHELPER_ARGS DECLARE_MEMBERHELPER_ARGS
3331 #define DECLARE_GETHELPER_ARGS \
3332 DECLARE_MEMBERHELPER_ARGS \
3335 #define MEMBERHELPERPRE_ARGS \
3336 pc, ndiscard, base, tvScratch, tvLiteral, \
3337 tvRef, tvRef2, mcode, curMember
3339 // The following arguments are outputs:
3340 // pc: bytecode instruction after the vector instruction
3341 // ndiscard: number of stack elements to discard
3342 // base: ultimate result of the vector-get
3343 // tvScratch: temporary result storage
3344 // tvRef: temporary result storage
3345 // tvRef2: temporary result storage
3346 // mcode: output MemberCode for the last member if LeaveLast
3347 // curMember: output last member value one if LeaveLast; but undefined
3348 // if the last mcode == MW
3350 // If saveResult is true, then upon completion of getHelperPre(),
3351 // tvScratch contains a reference to the result (a duplicate of what
3352 // base refers to). getHelperPost<true>(...) then saves the result
3353 // to its final location.
3354 template <bool warn
,
3356 VMExecutionContext::VectorLeaveCode mleave
>
3357 inline void OPTBLD_INLINE
VMExecutionContext::getHelperPre(
3361 TypedValue
& tvScratch
,
3362 TypedValue
& tvLiteral
,
3366 TypedValue
*& curMember
) {
3367 memberHelperPre
<false, warn
, false, false,
3368 false, 0, mleave
, saveResult
>(MEMBERHELPERPRE_ARGS
);
3371 #define GETHELPERPOST_ARGS ndiscard, tvRet, tvScratch, tvRef, tvRef2
3372 template <bool saveResult
>
3373 inline void OPTBLD_INLINE
VMExecutionContext::getHelperPost(
3374 unsigned ndiscard
, TypedValue
*& tvRet
, TypedValue
& tvScratch
,
3375 TypedValue
& tvRef
, TypedValue
& tvRef2
) {
3376 // Clean up all ndiscard elements on the stack. Actually discard
3377 // only ndiscard - 1, and overwrite the last cell with the result,
3378 // or if ndiscard is zero we actually need to allocate a cell.
3379 for (unsigned depth
= 0; depth
< ndiscard
; ++depth
) {
3380 TypedValue
* tv
= m_stack
.indTV(depth
);
3381 tvRefcountedDecRef(tv
);
3385 tvRet
= m_stack
.allocTV();
3387 m_stack
.ndiscard(ndiscard
- 1);
3388 tvRet
= m_stack
.topTV();
3390 tvRefcountedDecRef(&tvRef
);
3391 tvRefcountedDecRef(&tvRef2
);
3394 // If tvRef wasn't just allocated, we've already decref'd it in
3396 memcpy(tvRet
, &tvScratch
, sizeof(TypedValue
));
3400 #define GETHELPER_ARGS \
3401 pc, ndiscard, tvRet, base, tvScratch, tvLiteral, \
3402 tvRef, tvRef2, mcode, curMember
3403 inline void OPTBLD_INLINE
3404 VMExecutionContext::getHelper(PC
& pc
,
3408 TypedValue
& tvScratch
,
3409 TypedValue
& tvLiteral
,
3413 TypedValue
*& curMember
) {
3414 getHelperPre
<true, true, ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
3415 getHelperPost
<true>(GETHELPERPOST_ARGS
);
3419 VMExecutionContext::getElem(TypedValue
* base
, TypedValue
* key
,
3421 assert(base
->m_type
!= KindOfArray
);
3423 tvWriteUninit(dest
);
3424 TypedValue
* result
= Elem
<true>(*dest
, *dest
, base
, key
);
3425 if (result
!= dest
) {
3426 tvDup(result
, dest
);
3430 template <bool setMember
,
3435 unsigned mdepth
, // extra args on stack for set (e.g. rhs)
3436 VMExecutionContext::VectorLeaveCode mleave
,
3438 inline bool OPTBLD_INLINE
VMExecutionContext::memberHelperPre(
3439 PC
& pc
, unsigned& ndiscard
, TypedValue
*& base
,
3440 TypedValue
& tvScratch
, TypedValue
& tvLiteral
,
3441 TypedValue
& tvRef
, TypedValue
& tvRef2
,
3442 MemberCode
& mcode
, TypedValue
*& curMember
) {
3443 // The caller must move pc to the vector immediate before calling
3444 // {get, set}HelperPre.
3445 const ImmVector immVec
= ImmVector::createFromStream(pc
);
3446 const uint8_t* vec
= immVec
.vec();
3447 assert(immVec
.size() > 0);
3449 // PC needs to be advanced before we do anything, otherwise if we
3450 // raise a notice in the middle of this we could resume at the wrong
3452 pc
+= immVec
.size() + sizeof(int32_t) + sizeof(int32_t);
3455 assert(mdepth
== 0);
3460 ndiscard
= immVec
.numStackValues();
3461 int depth
= mdepth
+ ndiscard
- 1;
3462 const LocationCode lcode
= LocationCode(*vec
++);
3464 TypedValue
* loc
= nullptr;
3466 Class
* const ctx
= arGetContextClass(getFP());
3469 TypedValue
* fr
= nullptr;
3472 tvWriteUninit(&tvScratch
);
3476 loc
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3479 loc
= m_stack
.indTV(depth
--);
3484 lookupd_var(m_fp
, name
, loc
, fr
);
3486 lookup_var(m_fp
, name
, loc
, fr
);
3488 if (fr
== nullptr) {
3490 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
3492 tvWriteNull(&dummy
);
3501 loc
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3504 loc
= m_stack
.indTV(depth
--);
3509 lookupd_gbl(m_fp
, name
, loc
, fr
);
3511 lookup_gbl(m_fp
, name
, loc
, fr
);
3513 if (fr
== nullptr) {
3515 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
3517 tvWriteNull(&dummy
);
3526 cref
= m_stack
.indTV(mdepth
);
3527 pname
= m_stack
.indTV(depth
--);
3530 cref
= m_stack
.indTV(mdepth
);
3531 pname
= frame_local_inner(m_fp
, decodeVariableSizeImm(&vec
));
3535 bool visible
, accessible
;
3536 assert(cref
->m_type
== KindOfClass
);
3537 const Class
* class_
= cref
->m_data
.pcls
;
3538 StringData
* name
= lookup_name(pname
);
3539 loc
= class_
->getSProp(ctx
, name
, visible
, accessible
);
3540 if (!(visible
&& accessible
)) {
3541 raise_error("Invalid static property access: %s::%s",
3542 class_
->name()->data(),
3550 int localInd
= decodeVariableSizeImm(&vec
);
3551 loc
= frame_local_inner(m_fp
, localInd
);
3553 if (loc
->m_type
== KindOfUninit
) {
3554 raise_notice(Strings::UNDEFINED_VARIABLE
,
3555 m_fp
->m_func
->localVarName(localInd
)->data());
3562 loc
= m_stack
.indTV(depth
--);
3565 assert(m_fp
->hasThis());
3566 tvScratch
.m_type
= KindOfObject
;
3567 tvScratch
.m_data
.pobj
= m_fp
->getThis();
3571 default: not_reached();
3575 tvWriteUninit(&tvLiteral
);
3576 tvWriteUninit(&tvRef
);
3577 tvWriteUninit(&tvRef2
);
3579 // Iterate through the members.
3581 mcode
= MemberCode(*vec
++);
3582 if (memberCodeHasImm(mcode
)) {
3583 int64_t memberImm
= decodeMemberCodeImm(&vec
, mcode
);
3584 if (memberCodeImmIsString(mcode
)) {
3585 tvAsVariant(&tvLiteral
) =
3586 m_fp
->m_func
->unit()->lookupLitstrId(memberImm
);
3587 assert(!IS_REFCOUNTED_TYPE(tvLiteral
.m_type
));
3588 curMember
= &tvLiteral
;
3589 } else if (mcode
== MEI
) {
3590 tvAsVariant(&tvLiteral
) = memberImm
;
3591 curMember
= &tvLiteral
;
3593 assert(memberCodeImmIsLoc(mcode
));
3594 curMember
= frame_local_inner(m_fp
, memberImm
);
3597 curMember
= (setMember
&& mcode
== MW
) ? nullptr : m_stack
.indTV(depth
--);
3600 if (mleave
== LeaveLast
) {
3614 result
= ElemU(tvScratch
, tvRef
, base
, curMember
);
3615 } else if (define
) {
3616 result
= ElemD
<warn
,reffy
>(tvScratch
, tvRef
, base
, curMember
);
3618 result
= Elem
<warn
>(tvScratch
, tvRef
, base
, curMember
);
3624 result
= Prop
<warn
, define
, unset
>(tvScratch
, tvRef
, ctx
, base
,
3630 result
= NewElem(tvScratch
, tvRef
, base
);
3632 raise_error("Cannot use [] for reading");
3638 result
= nullptr; // Silence compiler warning.
3640 assert(result
!= nullptr);
3641 ratchetRefs(result
, tvRef
, tvRef2
);
3642 // Check whether an error occurred (i.e. no result was set).
3643 if (setMember
&& result
== &tvScratch
&& result
->m_type
== KindOfUninit
) {
3649 if (mleave
== ConsumeAll
) {
3652 if (lcode
== LSC
|| lcode
== LSL
) {
3653 assert(depth
== int(mdepth
));
3655 assert(depth
== int(mdepth
) - 1);
3662 // If requested, save a copy of the result. If base already points to
3663 // tvScratch, no reference counting is necessary, because (with the
3664 // exception of the following block), tvScratch is never populated such
3665 // that it owns a reference that must be accounted for.
3666 if (base
!= &tvScratch
) {
3667 // Acquire a reference to the result via tvDup(); base points to the
3668 // result but does not own a reference.
3669 tvDup(base
, &tvScratch
);
3676 // The following arguments are outputs: (TODO put them in struct)
3677 // pc: bytecode instruction after the vector instruction
3678 // ndiscard: number of stack elements to discard
3679 // base: ultimate result of the vector-get
3680 // tvScratch: temporary result storage
3681 // tvRef: temporary result storage
3682 // tvRef2: temporary result storage
3683 // mcode: output MemberCode for the last member if LeaveLast
3684 // curMember: output last member value one if LeaveLast; but undefined
3685 // if the last mcode == MW
3686 template <bool warn
,
3690 unsigned mdepth
, // extra args on stack for set (e.g. rhs)
3691 VMExecutionContext::VectorLeaveCode mleave
>
3692 inline bool OPTBLD_INLINE
VMExecutionContext::setHelperPre(
3693 PC
& pc
, unsigned& ndiscard
, TypedValue
*& base
,
3694 TypedValue
& tvScratch
, TypedValue
& tvLiteral
,
3695 TypedValue
& tvRef
, TypedValue
& tvRef2
,
3696 MemberCode
& mcode
, TypedValue
*& curMember
) {
3697 return memberHelperPre
<true, warn
, define
, unset
,
3698 reffy
, mdepth
, mleave
, false>(MEMBERHELPERPRE_ARGS
);
3701 #define SETHELPERPOST_ARGS ndiscard, tvRef, tvRef2
3702 template <unsigned mdepth
>
3703 inline void OPTBLD_INLINE
VMExecutionContext::setHelperPost(
3704 unsigned ndiscard
, TypedValue
& tvRef
, TypedValue
& tvRef2
) {
3705 // Clean up the stack. Decref all the elements for the vector, but
3706 // leave the first mdepth (they are not part of the vector data).
3707 for (unsigned depth
= mdepth
; depth
-mdepth
< ndiscard
; ++depth
) {
3708 TypedValue
* tv
= m_stack
.indTV(depth
);
3709 tvRefcountedDecRef(tv
);
3712 // NOTE: currently the only instructions using this that have return
3713 // values on the stack also have more inputs than the -vector, so
3714 // mdepth > 0. They also always return the original top value of
3717 assert(mdepth
== 1 &&
3718 "We don't really support mdepth > 1 in setHelperPost");
3721 TypedValue
* retSrc
= m_stack
.topTV();
3722 TypedValue
* dest
= m_stack
.indTV(ndiscard
+ mdepth
- 1);
3723 assert(dest
!= retSrc
);
3724 memcpy(dest
, retSrc
, sizeof *dest
);
3728 m_stack
.ndiscard(ndiscard
);
3729 tvRefcountedDecRef(&tvRef
);
3730 tvRefcountedDecRef(&tvRef2
);
3733 inline void OPTBLD_INLINE
VMExecutionContext::iopLowInvalid(PC
& pc
) {
3734 fprintf(stderr
, "invalid bytecode executed\n");
3738 inline void OPTBLD_INLINE
VMExecutionContext::iopNop(PC
& pc
) {
3742 inline void OPTBLD_INLINE
VMExecutionContext::iopPopC(PC
& pc
) {
3747 inline void OPTBLD_INLINE
VMExecutionContext::iopPopV(PC
& pc
) {
3752 inline void OPTBLD_INLINE
VMExecutionContext::iopPopR(PC
& pc
) {
3754 if (m_stack
.topTV()->m_type
!= KindOfRef
) {
3761 inline void OPTBLD_INLINE
VMExecutionContext::iopDup(PC
& pc
) {
3766 inline void OPTBLD_INLINE
VMExecutionContext::iopBox(PC
& pc
) {
3771 inline void OPTBLD_INLINE
VMExecutionContext::iopUnbox(PC
& pc
) {
3776 inline void OPTBLD_INLINE
VMExecutionContext::iopBoxR(PC
& pc
) {
3778 TypedValue
* tv
= m_stack
.topTV();
3779 if (tv
->m_type
!= KindOfRef
) {
3784 inline void OPTBLD_INLINE
VMExecutionContext::iopUnboxR(PC
& pc
) {
3786 if (m_stack
.topTV()->m_type
== KindOfRef
) {
3791 inline void OPTBLD_INLINE
VMExecutionContext::iopNull(PC
& pc
) {
3796 inline void OPTBLD_INLINE
VMExecutionContext::iopNullUninit(PC
& pc
) {
3798 m_stack
.pushNullUninit();
3801 inline void OPTBLD_INLINE
VMExecutionContext::iopTrue(PC
& pc
) {
3806 inline void OPTBLD_INLINE
VMExecutionContext::iopFalse(PC
& pc
) {
3808 m_stack
.pushFalse();
3811 inline void OPTBLD_INLINE
VMExecutionContext::iopFile(PC
& pc
) {
3813 const StringData
* s
= m_fp
->m_func
->unit()->filepath();
3814 m_stack
.pushStaticString(const_cast<StringData
*>(s
));
3817 inline void OPTBLD_INLINE
VMExecutionContext::iopDir(PC
& pc
) {
3819 const StringData
* s
= m_fp
->m_func
->unit()->dirpath();
3820 m_stack
.pushStaticString(const_cast<StringData
*>(s
));
3823 inline void OPTBLD_INLINE
VMExecutionContext::iopInt(PC
& pc
) {
3829 inline void OPTBLD_INLINE
VMExecutionContext::iopDouble(PC
& pc
) {
3832 m_stack
.pushDouble(d
);
3835 inline void OPTBLD_INLINE
VMExecutionContext::iopString(PC
& pc
) {
3838 m_stack
.pushStaticString(s
);
3841 inline void OPTBLD_INLINE
VMExecutionContext::iopArray(PC
& pc
) {
3844 ArrayData
* a
= m_fp
->m_func
->unit()->lookupArrayId(id
);
3845 m_stack
.pushStaticArray(a
);
3848 inline void OPTBLD_INLINE
VMExecutionContext::iopNewArray(PC
& pc
) {
3850 // Clever sizing avoids extra work in HphpArray construction.
3851 auto arr
= ArrayData::Make(size_t(3U) << (HphpArray::MinLgTableSize
-2));
3852 m_stack
.pushArray(arr
);
3855 inline void OPTBLD_INLINE
VMExecutionContext::iopNewTuple(PC
& pc
) {
3858 // This constructor moves values, no inc/decref is necessary.
3859 HphpArray
* arr
= ArrayData::Make(n
, m_stack
.topC());
3860 m_stack
.ndiscard(n
);
3861 m_stack
.pushArray(arr
);
3864 inline void OPTBLD_INLINE
VMExecutionContext::iopAddElemC(PC
& pc
) {
3866 Cell
* c1
= m_stack
.topC();
3867 Cell
* c2
= m_stack
.indC(1);
3868 Cell
* c3
= m_stack
.indC(2);
3869 if (c3
->m_type
!= KindOfArray
) {
3870 raise_error("AddElemC: $3 must be an array");
3872 if (c2
->m_type
== KindOfInt64
) {
3873 tvCellAsVariant(c3
).asArrRef().set(c2
->m_data
.num
, tvAsCVarRef(c1
));
3875 tvCellAsVariant(c3
).asArrRef().set(tvAsCVarRef(c2
), tvAsCVarRef(c1
));
3881 inline void OPTBLD_INLINE
VMExecutionContext::iopAddElemV(PC
& pc
) {
3883 Var
* v1
= m_stack
.topV();
3884 Cell
* c2
= m_stack
.indC(1);
3885 Cell
* c3
= m_stack
.indC(2);
3886 if (c3
->m_type
!= KindOfArray
) {
3887 raise_error("AddElemV: $3 must be an array");
3889 if (c2
->m_type
== KindOfInt64
) {
3890 tvCellAsVariant(c3
).asArrRef().set(c2
->m_data
.num
, ref(tvAsCVarRef(v1
)));
3892 tvCellAsVariant(c3
).asArrRef().set(tvAsCVarRef(c2
), ref(tvAsCVarRef(v1
)));
3898 inline void OPTBLD_INLINE
VMExecutionContext::iopAddNewElemC(PC
& pc
) {
3900 Cell
* c1
= m_stack
.topC();
3901 Cell
* c2
= m_stack
.indC(1);
3902 if (c2
->m_type
!= KindOfArray
) {
3903 raise_error("AddNewElemC: $2 must be an array");
3905 tvCellAsVariant(c2
).asArrRef().append(tvAsCVarRef(c1
));
3909 inline void OPTBLD_INLINE
VMExecutionContext::iopAddNewElemV(PC
& pc
) {
3911 Var
* v1
= m_stack
.topV();
3912 Cell
* c2
= m_stack
.indC(1);
3913 if (c2
->m_type
!= KindOfArray
) {
3914 raise_error("AddNewElemV: $2 must be an array");
3916 tvCellAsVariant(c2
).asArrRef().append(ref(tvAsCVarRef(v1
)));
3920 inline void OPTBLD_INLINE
VMExecutionContext::iopNewCol(PC
& pc
) {
3926 case Collection::VectorType
: obj
= NEWOBJ(c_Vector
)(); break;
3927 case Collection::MapType
: obj
= NEWOBJ(c_Map
)(); break;
3928 case Collection::StableMapType
: obj
= NEWOBJ(c_StableMap
)(); break;
3929 case Collection::SetType
: obj
= NEWOBJ(c_Set
)(); break;
3930 case Collection::PairType
: obj
= NEWOBJ(c_Pair
)(); break;
3933 raise_error("NewCol: Invalid collection type");
3936 // Reserve enough room for nElms elements in advance
3938 collectionReserve(obj
, nElms
);
3940 m_stack
.pushObject(obj
);
3943 inline void OPTBLD_INLINE
VMExecutionContext::iopColAddNewElemC(PC
& pc
) {
3945 Cell
* c1
= m_stack
.topC();
3946 Cell
* c2
= m_stack
.indC(1);
3947 if (c2
->m_type
== KindOfObject
&& c2
->m_data
.pobj
->isCollection()) {
3948 collectionAppend(c2
->m_data
.pobj
, c1
);
3950 raise_error("ColAddNewElemC: $2 must be a collection");
3955 inline void OPTBLD_INLINE
VMExecutionContext::iopColAddElemC(PC
& pc
) {
3957 Cell
* c1
= m_stack
.topC();
3958 Cell
* c2
= m_stack
.indC(1);
3959 Cell
* c3
= m_stack
.indC(2);
3960 if (c3
->m_type
== KindOfObject
&& c3
->m_data
.pobj
->isCollection()) {
3961 collectionSet(c3
->m_data
.pobj
, c2
, c1
);
3963 raise_error("ColAddElemC: $3 must be a collection");
3969 inline void OPTBLD_INLINE
VMExecutionContext::iopCns(PC
& pc
) {
3972 TypedValue
* cns
= Unit::loadCns(s
);
3973 if (cns
== nullptr) {
3974 raise_notice(Strings::UNDEFINED_CONSTANT
, s
->data(), s
->data());
3975 m_stack
.pushStaticString(s
);
3978 Cell
* c1
= m_stack
.allocC();
3979 tvReadCell(cns
, c1
);
3982 inline void OPTBLD_INLINE
VMExecutionContext::iopCnsE(PC
& pc
) {
3985 TypedValue
* cns
= Unit::loadCns(s
);
3986 if (cns
== nullptr) {
3987 raise_error("Undefined constant '%s'", s
->data());
3989 Cell
* c1
= m_stack
.allocC();
3990 tvReadCell(cns
, c1
);
3993 inline void OPTBLD_INLINE
VMExecutionContext::iopCnsU(PC
& pc
) {
3995 DECODE_LITSTR(name
);
3996 DECODE_LITSTR(fallback
);
3997 TypedValue
* cns
= Unit::loadCns(name
);
3998 if (cns
== nullptr) {
3999 cns
= Unit::loadCns(fallback
);
4000 if (cns
== nullptr) {
4002 Strings::UNDEFINED_CONSTANT
,
4006 m_stack
.pushStaticString(fallback
);
4010 Cell
* c1
= m_stack
.allocC();
4011 tvReadCell(cns
, c1
);
4014 inline void OPTBLD_INLINE
VMExecutionContext::iopDefCns(PC
& pc
) {
4017 TypedValue
* tv
= m_stack
.topTV();
4018 tvAsVariant(tv
) = Unit::defCns(s
, tv
);
4021 inline void OPTBLD_INLINE
VMExecutionContext::iopClsCns(PC
& pc
) {
4023 DECODE_LITSTR(clsCnsName
);
4024 TypedValue
* tv
= m_stack
.topTV();
4025 assert(tv
->m_type
== KindOfClass
);
4026 Class
* class_
= tv
->m_data
.pcls
;
4027 assert(class_
!= nullptr);
4028 TypedValue
* clsCns
= class_
->clsCnsGet(clsCnsName
);
4029 if (clsCns
== nullptr) {
4030 raise_error("Couldn't find constant %s::%s",
4031 class_
->name()->data(), clsCnsName
->data());
4033 tvReadCell(clsCns
, tv
);
4036 inline void OPTBLD_INLINE
VMExecutionContext::iopClsCnsD(PC
& pc
) {
4038 DECODE_LITSTR(clsCnsName
);
4039 DECODE(Id
, classId
);
4040 const NamedEntityPair
& classNamedEntity
=
4041 m_fp
->m_func
->unit()->lookupNamedEntityPairId(classId
);
4043 TypedValue
* clsCns
= lookupClsCns(classNamedEntity
.second
,
4044 classNamedEntity
.first
, clsCnsName
);
4045 assert(clsCns
!= nullptr);
4046 Cell
* c1
= m_stack
.allocC();
4047 tvReadCell(clsCns
, c1
);
4050 inline void OPTBLD_INLINE
VMExecutionContext::iopConcat(PC
& pc
) {
4052 Cell
* c1
= m_stack
.topC();
4053 Cell
* c2
= m_stack
.indC(1);
4054 if (IS_STRING_TYPE(c1
->m_type
) && IS_STRING_TYPE(c2
->m_type
)) {
4055 tvCellAsVariant(c2
) = concat(tvCellAsVariant(c2
), tvCellAsCVarRef(c1
));
4057 tvCellAsVariant(c2
) = concat(tvCellAsVariant(c2
).toString(),
4058 tvCellAsCVarRef(c1
).toString());
4060 assert(c2
->m_data
.pstr
->getCount() > 0);
4064 #define MATHOP(OP, VOP) do { \
4066 Cell* c1 = m_stack.topC(); \
4067 Cell* c2 = m_stack.indC(1); \
4068 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4069 int64_t a = c2->m_data.num; \
4070 int64_t b = c1->m_data.num; \
4071 MATHOP_DIVCHECK(0) \
4072 c2->m_data.num = a OP b; \
4077 tvCellAsVariant(c2) = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
4081 #define MATHOP_DOUBLE(OP) \
4082 else if (c2->m_type == KindOfDouble \
4083 && c1->m_type == KindOfDouble) { \
4084 double a = c2->m_data.dbl; \
4085 double b = c1->m_data.dbl; \
4086 MATHOP_DIVCHECK(0.0) \
4087 c2->m_data.dbl = a OP b; \
4090 #define MATHOP_DIVCHECK(x)
4091 inline void OPTBLD_INLINE
VMExecutionContext::iopAdd(PC
& pc
) {
4095 inline void OPTBLD_INLINE
VMExecutionContext::iopSub(PC
& pc
) {
4099 inline void OPTBLD_INLINE
VMExecutionContext::iopMul(PC
& pc
) {
4100 MATHOP(*, multiply
);
4102 #undef MATHOP_DIVCHECK
4104 #define MATHOP_DIVCHECK(x) \
4106 raise_warning(Strings::DIVISION_BY_ZERO); \
4107 c2->m_data.num = 0; \
4108 c2->m_type = KindOfBoolean; \
4110 inline void OPTBLD_INLINE
VMExecutionContext::iopDiv(PC
& pc
) {
4112 Cell
* c1
= m_stack
.topC(); // denominator
4113 Cell
* c2
= m_stack
.indC(1); // numerator
4114 // Special handling for evenly divisible ints
4115 if (c2
->m_type
== KindOfInt64
&& c1
->m_type
== KindOfInt64
4116 && c1
->m_data
.num
!= 0 && c2
->m_data
.num
% c1
->m_data
.num
== 0) {
4117 int64_t b
= c1
->m_data
.num
;
4119 c2
->m_data
.num
/= b
;
4124 tvCellAsVariant(c2
) = divide(tvCellAsVariant(c2
), tvCellAsCVarRef(c1
));
4128 #undef MATHOP_DOUBLE
4130 #define MATHOP_DOUBLE(OP)
4131 inline void OPTBLD_INLINE
VMExecutionContext::iopMod(PC
& pc
) {
4134 #undef MATHOP_DOUBLE
4135 #undef MATHOP_DIVCHECK
4137 #define LOGICOP(OP) do { \
4139 Cell* c1 = m_stack.topC(); \
4140 Cell* c2 = m_stack.indC(1); \
4142 tvCellAsVariant(c2) = \
4143 (bool)(bool(tvCellAsVariant(c2)) OP bool(tvCellAsVariant(c1))); \
4148 inline void OPTBLD_INLINE
VMExecutionContext::iopXor(PC
& pc
) {
4153 inline void OPTBLD_INLINE
VMExecutionContext::iopNot(PC
& pc
) {
4155 Cell
* c1
= m_stack
.topC();
4156 tvCellAsVariant(c1
) = !bool(tvCellAsVariant(c1
));
4159 #define CMPOP(OP, VOP) do { \
4161 Cell* c1 = m_stack.topC(); \
4162 Cell* c2 = m_stack.indC(1); \
4163 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4164 int64_t a = c2->m_data.num; \
4165 int64_t b = c1->m_data.num; \
4166 c2->m_data.num = (a OP b); \
4167 c2->m_type = KindOfBoolean; \
4170 int64_t result = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
4171 tvRefcountedDecRefCell(c2); \
4172 c2->m_data.num = result; \
4173 c2->m_type = KindOfBoolean; \
4177 inline void OPTBLD_INLINE
VMExecutionContext::iopSame(PC
& pc
) {
4181 inline void OPTBLD_INLINE
VMExecutionContext::iopNSame(PC
& pc
) {
4185 inline void OPTBLD_INLINE
VMExecutionContext::iopEq(PC
& pc
) {
4189 inline void OPTBLD_INLINE
VMExecutionContext::iopNeq(PC
& pc
) {
4193 inline void OPTBLD_INLINE
VMExecutionContext::iopLt(PC
& pc
) {
4197 inline void OPTBLD_INLINE
VMExecutionContext::iopLte(PC
& pc
) {
4198 CMPOP(<=, less_or_equal
);
4201 inline void OPTBLD_INLINE
VMExecutionContext::iopGt(PC
& pc
) {
4205 inline void OPTBLD_INLINE
VMExecutionContext::iopGte(PC
& pc
) {
4206 CMPOP(>=, more_or_equal
);
4210 #define MATHOP_DOUBLE(OP)
4211 #define MATHOP_DIVCHECK(x)
4212 inline void OPTBLD_INLINE
VMExecutionContext::iopBitAnd(PC
& pc
) {
4213 MATHOP(&, bitwise_and
);
4216 inline void OPTBLD_INLINE
VMExecutionContext::iopBitOr(PC
& pc
) {
4217 MATHOP(|, bitwise_or
);
4220 inline void OPTBLD_INLINE
VMExecutionContext::iopBitXor(PC
& pc
) {
4221 MATHOP(^, bitwise_xor
);
4224 #undef MATHOP_DOUBLE
4225 #undef MATHOP_DIVCHECK
4227 inline void OPTBLD_INLINE
VMExecutionContext::iopBitNot(PC
& pc
) {
4229 Cell
* c1
= m_stack
.topC();
4230 if (LIKELY(c1
->m_type
== KindOfInt64
)) {
4231 c1
->m_data
.num
= ~c1
->m_data
.num
;
4232 } else if (c1
->m_type
== KindOfDouble
) {
4233 c1
->m_type
= KindOfInt64
;
4234 c1
->m_data
.num
= ~int64_t(c1
->m_data
.dbl
);
4235 } else if (IS_STRING_TYPE(c1
->m_type
)) {
4236 tvCellAsVariant(c1
) = ~tvCellAsVariant(c1
);
4238 raise_error("Unsupported operand type for ~");
4242 #define SHIFTOP(OP) do { \
4244 Cell* c1 = m_stack.topC(); \
4245 Cell* c2 = m_stack.indC(1); \
4246 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4247 int64_t a = c2->m_data.num; \
4248 int64_t b = c1->m_data.num; \
4249 c2->m_data.num = a OP b; \
4252 tvCellAsVariant(c2) = tvCellAsVariant(c2).toInt64() OP \
4253 tvCellAsCVarRef(c1).toInt64(); \
4257 inline void OPTBLD_INLINE
VMExecutionContext::iopShl(PC
& pc
) {
4261 inline void OPTBLD_INLINE
VMExecutionContext::iopShr(PC
& pc
) {
4266 inline void OPTBLD_INLINE
VMExecutionContext::iopCastBool(PC
& pc
) {
4268 Cell
* c1
= m_stack
.topC();
4269 tvCastToBooleanInPlace(c1
);
4272 inline void OPTBLD_INLINE
VMExecutionContext::iopCastInt(PC
& pc
) {
4274 Cell
* c1
= m_stack
.topC();
4275 tvCastToInt64InPlace(c1
);
4278 inline void OPTBLD_INLINE
VMExecutionContext::iopCastDouble(PC
& pc
) {
4280 Cell
* c1
= m_stack
.topC();
4281 tvCastToDoubleInPlace(c1
);
4284 inline void OPTBLD_INLINE
VMExecutionContext::iopCastString(PC
& pc
) {
4286 Cell
* c1
= m_stack
.topC();
4287 tvCastToStringInPlace(c1
);
4290 inline void OPTBLD_INLINE
VMExecutionContext::iopCastArray(PC
& pc
) {
4292 Cell
* c1
= m_stack
.topC();
4293 tvCastToArrayInPlace(c1
);
4296 inline void OPTBLD_INLINE
VMExecutionContext::iopCastObject(PC
& pc
) {
4298 Cell
* c1
= m_stack
.topC();
4299 tvCastToObjectInPlace(c1
);
4302 inline bool OPTBLD_INLINE
VMExecutionContext::cellInstanceOf(
4303 TypedValue
* tv
, const NamedEntity
* ne
) {
4304 assert(tv
->m_type
!= KindOfRef
);
4305 if (tv
->m_type
== KindOfObject
) {
4306 Class
* cls
= Unit::lookupClass(ne
);
4307 if (cls
) return tv
->m_data
.pobj
->instanceof(cls
);
4308 } else if (tv
->m_type
== KindOfArray
) {
4309 Class
* cls
= Unit::lookupClass(ne
);
4310 if (cls
&& interface_supports_array(cls
->name())) {
4317 inline void OPTBLD_INLINE
VMExecutionContext::iopInstanceOf(PC
& pc
) {
4319 Cell
* c1
= m_stack
.topC(); // c2 instanceof c1
4320 Cell
* c2
= m_stack
.indC(1);
4322 if (IS_STRING_TYPE(c1
->m_type
)) {
4323 const NamedEntity
* rhs
= Unit::GetNamedEntity(c1
->m_data
.pstr
);
4324 r
= cellInstanceOf(c2
, rhs
);
4325 } else if (c1
->m_type
== KindOfObject
) {
4326 if (c2
->m_type
== KindOfObject
) {
4327 ObjectData
* lhs
= c2
->m_data
.pobj
;
4328 ObjectData
* rhs
= c1
->m_data
.pobj
;
4329 r
= lhs
->instanceof(rhs
->getVMClass());
4332 raise_error("Class name must be a valid object or a string");
4335 tvRefcountedDecRefCell(c2
);
4337 c2
->m_type
= KindOfBoolean
;
4340 inline void OPTBLD_INLINE
VMExecutionContext::iopInstanceOfD(PC
& pc
) {
4343 if (shouldProfile()) {
4344 Class::profileInstanceOf(m_fp
->m_func
->unit()->lookupLitstrId(id
));
4346 const NamedEntity
* ne
= m_fp
->m_func
->unit()->lookupNamedEntityId(id
);
4347 Cell
* c1
= m_stack
.topC();
4348 bool r
= cellInstanceOf(c1
, ne
);
4349 tvRefcountedDecRefCell(c1
);
4351 c1
->m_type
= KindOfBoolean
;
4354 inline void OPTBLD_INLINE
VMExecutionContext::iopPrint(PC
& pc
) {
4356 Cell
* c1
= m_stack
.topC();
4357 print(tvCellAsVariant(c1
).toString());
4358 tvRefcountedDecRefCell(c1
);
4359 c1
->m_type
= KindOfInt64
;
4363 inline void OPTBLD_INLINE
VMExecutionContext::iopClone(PC
& pc
) {
4365 TypedValue
* tv
= m_stack
.topTV();
4366 if (tv
->m_type
!= KindOfObject
) {
4367 raise_error("clone called on non-object");
4369 ObjectData
* obj
= tv
->m_data
.pobj
;
4370 const Class
* class_ UNUSED
= obj
->getVMClass();
4371 ObjectData
* newobj
= obj
->clone();
4374 tv
->m_type
= KindOfObject
;
4375 tv
->m_data
.pobj
= newobj
;
4378 inline int OPTBLD_INLINE
4379 VMExecutionContext::handleUnwind(UnwindStatus unwindType
) {
4381 if (unwindType
== UnwindPropagate
) {
4382 longJumpType
= EXCEPTION_PROPAGATE
;
4383 if (m_nestedVMs
.empty()) {
4388 assert(unwindType
== UnwindResumeVM
);
4389 longJumpType
= EXCEPTION_RESUMEVM
;
4391 return longJumpType
;
4394 inline void OPTBLD_INLINE
VMExecutionContext::iopExit(PC
& pc
) {
4397 Cell
* c1
= m_stack
.topC();
4398 if (c1
->m_type
== KindOfInt64
) {
4399 exitCode
= c1
->m_data
.num
;
4401 print(tvCellAsVariant(c1
).toString());
4404 throw ExitException(exitCode
);
4407 inline void OPTBLD_INLINE
VMExecutionContext::iopFatal(PC
& pc
) {
4409 TypedValue
* top
= m_stack
.topTV();
4411 DECODE_IVA(skipFrame
);
4412 if (IS_STRING_TYPE(top
->m_type
)) {
4413 msg
= top
->m_data
.pstr
->data();
4415 msg
= "Fatal error message not a string";
4419 raise_error_without_first_frame(msg
);
4425 #define JMP_SURPRISE_CHECK() \
4426 if (offset < 0 && UNLIKELY(Transl::TargetCache::loadConditionFlags())) { \
4428 EventHook::CheckSurprise(); \
4431 inline void OPTBLD_INLINE
VMExecutionContext::iopJmp(PC
& pc
) {
4433 DECODE_JMP(Offset
, offset
);
4434 JMP_SURPRISE_CHECK();
4438 #define JMPOP(OP, VOP) do { \
4439 Cell* c1 = m_stack.topC(); \
4440 if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) { \
4441 int64_t n = c1->m_data.num; \
4444 DECODE_JMP(Offset, offset); \
4445 JMP_SURPRISE_CHECK(); \
4449 pc += 1 + sizeof(Offset); \
4453 if (VOP(tvCellAsCVarRef(c1))) { \
4455 DECODE_JMP(Offset, offset); \
4456 JMP_SURPRISE_CHECK(); \
4460 pc += 1 + sizeof(Offset); \
4465 inline void OPTBLD_INLINE
VMExecutionContext::iopJmpZ(PC
& pc
) {
4469 inline void OPTBLD_INLINE
VMExecutionContext::iopJmpNZ(PC
& pc
) {
4473 #undef JMP_SURPRISE_CHECK
4476 MATCH_NORMAL
, // value was converted to an int: match normally
4477 MATCH_NONZERO
, // can't be converted to an int: match first nonzero case
4478 MATCH_DEFAULT
, // can't be converted to an int: match default case
4481 static SwitchMatch
doubleCheck(double d
, int64_t& out
) {
4482 if (int64_t(d
) == d
) {
4484 return MATCH_NORMAL
;
4486 return MATCH_DEFAULT
;
4490 inline void OPTBLD_INLINE
VMExecutionContext::iopSwitch(PC
& pc
) {
4493 DECODE(int32_t, veclen
);
4495 Offset
* jmptab
= (Offset
*)pc
;
4496 pc
+= veclen
* sizeof(*jmptab
);
4497 DECODE(int64_t, base
);
4498 DECODE_IVA(bounded
);
4500 TypedValue
* val
= m_stack
.topTV();
4502 assert(val
->m_type
== KindOfInt64
);
4503 // Continuation switch: no bounds checking needed
4504 int64_t label
= val
->m_data
.num
;
4506 assert(label
>= 0 && label
< veclen
);
4507 pc
= origPC
+ jmptab
[label
];
4509 // Generic integer switch
4511 SwitchMatch match
= MATCH_NORMAL
;
4513 switch (val
->m_type
) {
4520 // bool(true) is equal to any non-zero int, bool(false) == 0
4521 if (val
->m_data
.num
) {
4522 match
= MATCH_NONZERO
;
4529 intval
= val
->m_data
.num
;
4533 match
= doubleCheck(val
->m_data
.dbl
, intval
);
4536 case KindOfStaticString
:
4537 case KindOfString
: {
4539 DataType t
= val
->m_data
.pstr
->isNumericWithVal(intval
, dval
, 1);
4546 match
= doubleCheck(dval
, intval
);
4556 tvRefcountedDecRef(val
);
4561 match
= MATCH_DEFAULT
;
4566 intval
= val
->m_data
.pobj
->o_toInt64();
4575 if (match
!= MATCH_NORMAL
||
4576 intval
< base
|| intval
>= (base
+ veclen
- 2)) {
4580 pc
= origPC
+ jmptab
[veclen
- 1];
4584 pc
= origPC
+ jmptab
[veclen
- 2];
4588 pc
= origPC
+ jmptab
[intval
- base
];
4593 inline void OPTBLD_INLINE
VMExecutionContext::iopSSwitch(PC
& pc
) {
4596 DECODE(int32_t, veclen
);
4598 unsigned cases
= veclen
- 1; // the last vector item is the default case
4599 StrVecItem
* jmptab
= (StrVecItem
*)pc
;
4600 pc
+= veclen
* sizeof(*jmptab
);
4602 TypedValue
* val
= m_stack
.topTV();
4603 Unit
* u
= m_fp
->m_func
->unit();
4605 for (i
= 0; i
< cases
; ++i
) {
4606 auto& item
= jmptab
[i
];
4607 const StringData
* str
= u
->lookupLitstrId(item
.str
);
4608 if (tvAsVariant(val
).equal(str
)) {
4609 pc
= origPC
+ item
.dest
;
4615 pc
= origPC
+ jmptab
[veclen
-1].dest
;
4620 inline void OPTBLD_INLINE
VMExecutionContext::iopRetC(PC
& pc
) {
4622 uint soff
= m_fp
->m_soff
;
4623 assert(!m_fp
->m_func
->isGenerator());
4625 // Call the runtime helpers to free the local variables and iterators
4626 frame_free_locals_inl(m_fp
, m_fp
->m_func
->numLocals());
4627 ActRec
* sfp
= m_fp
->arGetSfp();
4628 // Memcpy the the return value on top of the activation record. This works
4629 // the same regardless of whether the return value is boxed or not.
4630 TypedValue
* retval_ptr
= &m_fp
->m_r
;
4631 memcpy(retval_ptr
, m_stack
.topTV(), sizeof(TypedValue
));
4633 m_stack
.ndiscard(m_fp
->m_func
->numSlotsInFrame() + 1);
4635 if (LIKELY(sfp
!= m_fp
)) {
4636 // Restore caller's execution state.
4638 pc
= m_fp
->m_func
->unit()->entry() + m_fp
->m_func
->base() + soff
;
4640 assert(m_stack
.topTV() == retval_ptr
);
4642 // No caller; terminate.
4646 std::ostringstream os
;
4647 m_stack
.toStringElm(os
, m_stack
.topTV(), m_fp
);
4649 Trace::trace("Return %s from VMExecutionContext::dispatch("
4650 "%p)\n", os
.str().c_str(), m_fp
));
4657 inline void OPTBLD_INLINE
VMExecutionContext::iopRetV(PC
& pc
) {
4661 inline void OPTBLD_INLINE
VMExecutionContext::iopUnwind(PC
& pc
) {
4662 assert(!m_faults
.empty());
4663 assert(m_faults
.back().m_savedRaiseOffset
!= kInvalidOffset
);
4664 throw VMPrepareUnwind();
4667 inline void OPTBLD_INLINE
VMExecutionContext::iopThrow(PC
& pc
) {
4668 Cell
* c1
= m_stack
.topC();
4669 if (c1
->m_type
!= KindOfObject
||
4670 !static_cast<Instance
*>(c1
->m_data
.pobj
)->
4671 instanceof(SystemLib::s_ExceptionClass
)) {
4672 raise_error("Exceptions must be valid objects derived from the "
4673 "Exception base class");
4676 Object
obj(c1
->m_data
.pobj
);
4678 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionHook(obj
.get()));
4682 inline void OPTBLD_INLINE
VMExecutionContext::iopAGetC(PC
& pc
) {
4684 TypedValue
* tv
= m_stack
.topTV();
4685 lookupClsRef(tv
, tv
, true);
4688 inline void OPTBLD_INLINE
VMExecutionContext::iopAGetL(PC
& pc
) {
4691 TypedValue
* top
= m_stack
.allocTV();
4692 TypedValue
* fr
= frame_local_inner(m_fp
, local
);
4693 lookupClsRef(fr
, top
);
4696 static void raise_undefined_local(ActRec
* fp
, Id pind
) {
4697 assert(pind
< fp
->m_func
->numNamedLocals());
4698 raise_notice(Strings::UNDEFINED_VARIABLE
,
4699 fp
->m_func
->localVarName(pind
)->data());
4702 static inline void cgetl_inner_body(TypedValue
* fr
, TypedValue
* to
) {
4703 assert(fr
->m_type
!= KindOfUninit
);
4705 if (to
->m_type
== KindOfRef
) {
4710 static inline void cgetl_body(ActRec
* fp
,
4714 if (fr
->m_type
== KindOfUninit
) {
4715 // `to' is uninitialized here, so we need to tvWriteNull before
4716 // possibly causing stack unwinding.
4718 raise_undefined_local(fp
, pind
);
4720 cgetl_inner_body(fr
, to
);
4724 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL(PC
& pc
) {
4727 Cell
* to
= m_stack
.allocC();
4728 TypedValue
* fr
= frame_local(m_fp
, local
);
4729 cgetl_body(m_fp
, fr
, to
, local
);
4732 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL2(PC
& pc
) {
4735 TypedValue
* oldTop
= m_stack
.topTV();
4736 TypedValue
* newTop
= m_stack
.allocTV();
4737 memcpy(newTop
, oldTop
, sizeof *newTop
);
4739 TypedValue
* fr
= frame_local(m_fp
, local
);
4740 cgetl_body(m_fp
, fr
, to
, local
);
4743 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetL3(PC
& pc
) {
4746 TypedValue
* oldTop
= m_stack
.topTV();
4747 TypedValue
* oldSubTop
= m_stack
.indTV(1);
4748 TypedValue
* newTop
= m_stack
.allocTV();
4749 memmove(newTop
, oldTop
, sizeof *oldTop
* 2);
4750 Cell
* to
= oldSubTop
;
4751 TypedValue
* fr
= frame_local(m_fp
, local
);
4752 cgetl_body(m_fp
, fr
, to
, local
);
4755 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetN(PC
& pc
) {
4758 TypedValue
* to
= m_stack
.topTV();
4759 TypedValue
* fr
= nullptr;
4760 lookup_var(m_fp
, name
, to
, fr
);
4761 if (fr
== nullptr || fr
->m_type
== KindOfUninit
) {
4762 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4763 tvRefcountedDecRefCell(to
);
4766 tvRefcountedDecRefCell(to
);
4767 cgetl_inner_body(fr
, to
);
4769 decRefStr(name
); // TODO(#1146727): leaks during exceptions
4772 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetG(PC
& pc
) {
4775 TypedValue
* to
= m_stack
.topTV();
4776 TypedValue
* fr
= nullptr;
4777 lookup_gbl(m_fp
, name
, to
, fr
);
4778 if (fr
== nullptr) {
4780 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4782 tvRefcountedDecRefCell(to
);
4784 } else if (fr
->m_type
== KindOfUninit
) {
4785 raise_notice(Strings::UNDEFINED_VARIABLE
, name
->data());
4786 tvRefcountedDecRefCell(to
);
4789 tvRefcountedDecRefCell(to
);
4790 cgetl_inner_body(fr
, to
);
4792 decRefStr(name
); // TODO(#1146727): leaks during exceptions
4795 #define SPROP_OP_PRELUDE \
4797 TypedValue* clsref = m_stack.topTV(); \
4798 TypedValue* nameCell = m_stack.indTV(1); \
4799 TypedValue* output = nameCell; \
4801 bool visible, accessible; \
4802 lookup_sprop(m_fp, clsref, name, nameCell, val, visible, \
4805 #define SPROP_OP_POSTLUDE \
4808 #define GETS(box) do { \
4810 if (!(visible && accessible)) { \
4811 raise_error("Invalid static property access: %s::%s", \
4812 clsref->m_data.pcls->name()->data(), \
4816 if (val->m_type != KindOfRef) { \
4819 tvDupVar(val, output); \
4821 tvReadCell(val, output); \
4827 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetS(PC
& pc
) {
4830 if (shouldProfile() && name
&& name
->isStatic()) {
4831 recordType(TypeProfileKey(TypeProfileKey::StaticPropName
, name
),
4832 m_stack
.top()->m_type
);
4836 inline void OPTBLD_INLINE
VMExecutionContext::iopCGetM(PC
& pc
) {
4839 DECLARE_GETHELPER_ARGS
4840 getHelper(GETHELPER_ARGS
);
4841 if (tvRet
->m_type
== KindOfRef
) {
4844 assert(hasImmVector(*oldPC
));
4845 const ImmVector
& immVec
= ImmVector::createFromStream(oldPC
+ 1);
4848 if (immVec
.decodeLastMember(curUnit(), name
, mc
)) {
4849 recordType(TypeProfileKey(mc
, name
), m_stack
.top()->m_type
);
4853 static inline void vgetl_body(TypedValue
* fr
, TypedValue
* to
) {
4854 if (fr
->m_type
!= KindOfRef
) {
4860 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetL(PC
& pc
) {
4863 Var
* to
= m_stack
.allocV();
4864 TypedValue
* fr
= frame_local(m_fp
, local
);
4868 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetN(PC
& pc
) {
4871 TypedValue
* to
= m_stack
.topTV();
4872 TypedValue
* fr
= nullptr;
4873 lookupd_var(m_fp
, name
, to
, fr
);
4874 assert(fr
!= nullptr);
4875 tvRefcountedDecRefCell(to
);
4880 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetG(PC
& pc
) {
4883 TypedValue
* to
= m_stack
.topTV();
4884 TypedValue
* fr
= nullptr;
4885 lookupd_gbl(m_fp
, name
, to
, fr
);
4886 assert(fr
!= nullptr);
4887 tvRefcountedDecRefCell(to
);
4892 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetS(PC
& pc
) {
4898 inline void OPTBLD_INLINE
VMExecutionContext::iopVGetM(PC
& pc
) {
4900 DECLARE_SETHELPER_ARGS
4901 TypedValue
* tv1
= m_stack
.allocTV();
4903 if (!setHelperPre
<false, true, false, true, 1,
4904 ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
4905 if (base
->m_type
!= KindOfRef
) {
4908 tvDupVar(base
, tv1
);
4913 setHelperPost
<1>(SETHELPERPOST_ARGS
);
4916 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetN(PC
& pc
) {
4919 TypedValue
* tv1
= m_stack
.topTV();
4920 TypedValue
* tv
= nullptr;
4922 lookup_var(m_fp
, name
, tv1
, tv
);
4923 if (tv
== nullptr) {
4926 e
= isset(tvAsCVarRef(tv
));
4928 tvRefcountedDecRefCell(tv1
);
4929 tv1
->m_data
.num
= e
;
4930 tv1
->m_type
= KindOfBoolean
;
4934 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetG(PC
& pc
) {
4937 TypedValue
* tv1
= m_stack
.topTV();
4938 TypedValue
* tv
= nullptr;
4940 lookup_gbl(m_fp
, name
, tv1
, tv
);
4941 if (tv
== nullptr) {
4944 e
= isset(tvAsCVarRef(tv
));
4946 tvRefcountedDecRefCell(tv1
);
4947 tv1
->m_data
.num
= e
;
4948 tv1
->m_type
= KindOfBoolean
;
4952 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetS(PC
& pc
) {
4956 if (!(visible
&& accessible
)) {
4959 e
= isset(tvAsCVarRef(val
));
4962 output
->m_data
.num
= e
;
4963 output
->m_type
= KindOfBoolean
;
4967 inline void OPTBLD_INLINE
VMExecutionContext::iopIssetM(PC
& pc
) {
4969 DECLARE_GETHELPER_ARGS
4970 getHelperPre
<false, false, LeaveLast
>(MEMBERHELPERPRE_ARGS
);
4971 // Process last member specially, in order to employ the IssetElem/IssetProp
4972 // operations. (TODO combine with EmptyM.)
4973 bool issetResult
= false;
4979 issetResult
= IssetEmptyElem
<false>(tvScratch
, tvRef
, base
, curMember
);
4985 Class
* ctx
= arGetContextClass(m_fp
);
4986 issetResult
= IssetEmptyProp
<false>(ctx
, base
, curMember
);
4989 default: assert(false);
4991 getHelperPost
<false>(GETHELPERPOST_ARGS
);
4992 tvRet
->m_data
.num
= issetResult
;
4993 tvRet
->m_type
= KindOfBoolean
;
4996 #define IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4997 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## L(PC& pc) { \
5000 TypedValue* tv = frame_local(m_fp, local); \
5001 if (checkInit && tv->m_type == KindOfUninit) { \
5002 raise_undefined_local(m_fp, local); \
5004 bool ret = predicate(tvAsCVarRef(tv)); \
5005 TypedValue* topTv = m_stack.allocTV(); \
5006 topTv->m_data.num = ret; \
5007 topTv->m_type = KindOfBoolean; \
5010 #define IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
5011 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## C(PC& pc) { \
5013 TypedValue* topTv = m_stack.topTV(); \
5014 assert(topTv->m_type != KindOfRef); \
5015 bool ret = predicate(tvAsCVarRef(topTv)); \
5016 tvRefcountedDecRefCell(topTv); \
5017 topTv->m_data.num = ret; \
5018 topTv->m_type = KindOfBoolean; \
5021 #define IOP_TYPE_CHECK_INSTR(checkInit, what, predicate) \
5022 IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
5023 IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
5025 IOP_TYPE_CHECK_INSTR_L(false, set, isset)
5026 IOP_TYPE_CHECK_INSTR(true, Null
, is_null
)
5027 IOP_TYPE_CHECK_INSTR(true, Array
, is_array
)
5028 IOP_TYPE_CHECK_INSTR(true, String
, is_string
)
5029 IOP_TYPE_CHECK_INSTR(true, Object
, is_object
)
5030 IOP_TYPE_CHECK_INSTR(true, Int
, is_int
)
5031 IOP_TYPE_CHECK_INSTR(true, Double
, is_double
)
5032 IOP_TYPE_CHECK_INSTR(true, Bool
, is_bool
)
5033 #undef IOP_TYPE_CHECK_INSTR
5035 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyL(PC
& pc
) {
5038 TypedValue
* loc
= frame_local(m_fp
, local
);
5039 bool e
= empty(tvAsCVarRef(loc
));
5040 TypedValue
* tv1
= m_stack
.allocTV();
5041 tv1
->m_data
.num
= e
;
5042 tv1
->m_type
= KindOfBoolean
;
5045 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyN(PC
& pc
) {
5048 TypedValue
* tv1
= m_stack
.topTV();
5049 TypedValue
* tv
= nullptr;
5051 lookup_var(m_fp
, name
, tv1
, tv
);
5052 if (tv
== nullptr) {
5055 e
= empty(tvAsCVarRef(tv
));
5057 tvRefcountedDecRefCell(tv1
);
5058 tv1
->m_data
.num
= e
;
5059 tv1
->m_type
= KindOfBoolean
;
5063 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyG(PC
& pc
) {
5066 TypedValue
* tv1
= m_stack
.topTV();
5067 TypedValue
* tv
= nullptr;
5069 lookup_gbl(m_fp
, name
, tv1
, tv
);
5070 if (tv
== nullptr) {
5073 e
= empty(tvAsCVarRef(tv
));
5075 tvRefcountedDecRefCell(tv1
);
5076 tv1
->m_data
.num
= e
;
5077 tv1
->m_type
= KindOfBoolean
;
5081 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyS(PC
& pc
) {
5085 if (!(visible
&& accessible
)) {
5088 e
= empty(tvAsCVarRef(val
));
5091 output
->m_data
.num
= e
;
5092 output
->m_type
= KindOfBoolean
;
5096 inline void OPTBLD_INLINE
VMExecutionContext::iopEmptyM(PC
& pc
) {
5098 DECLARE_GETHELPER_ARGS
5099 getHelperPre
<false, false, LeaveLast
>(MEMBERHELPERPRE_ARGS
);
5100 // Process last member specially, in order to employ the EmptyElem/EmptyProp
5101 // operations. (TODO combine with IssetM)
5102 bool emptyResult
= false;
5108 emptyResult
= IssetEmptyElem
<true>(tvScratch
, tvRef
, base
, curMember
);
5114 Class
* ctx
= arGetContextClass(m_fp
);
5115 emptyResult
= IssetEmptyProp
<true>(ctx
, base
, curMember
);
5118 default: assert(false);
5120 getHelperPost
<false>(GETHELPERPOST_ARGS
);
5121 tvRet
->m_data
.num
= emptyResult
;
5122 tvRet
->m_type
= KindOfBoolean
;
5125 inline void OPTBLD_INLINE
VMExecutionContext::iopAKExists(PC
& pc
) {
5127 TypedValue
* arr
= m_stack
.topTV();
5128 TypedValue
* key
= arr
+ 1;
5129 bool result
= f_array_key_exists(tvAsCVarRef(key
), tvAsCVarRef(arr
));
5131 tvRefcountedDecRef(key
);
5132 key
->m_data
.num
= result
;
5133 key
->m_type
= KindOfBoolean
;
5136 inline void OPTBLD_INLINE
VMExecutionContext::iopArrayIdx(PC
& pc
) {
5138 TypedValue
* def
= m_stack
.topTV();
5139 TypedValue
* arr
= m_stack
.indTV(1);
5140 TypedValue
* key
= m_stack
.indTV(2);
5142 Variant result
= f_hphp_array_idx(tvAsCVarRef(key
),
5147 tvAsVariant(key
) = result
;
5150 inline void OPTBLD_INLINE
VMExecutionContext::iopSetL(PC
& pc
) {
5153 assert(local
< m_fp
->m_func
->numLocals());
5154 Cell
* fr
= m_stack
.topC();
5155 TypedValue
* to
= frame_local(m_fp
, local
);
5159 inline void OPTBLD_INLINE
VMExecutionContext::iopSetN(PC
& pc
) {
5162 Cell
* fr
= m_stack
.topC();
5163 TypedValue
* tv2
= m_stack
.indTV(1);
5164 TypedValue
* to
= nullptr;
5165 lookupd_var(m_fp
, name
, tv2
, to
);
5166 assert(to
!= nullptr);
5168 memcpy((void*)tv2
, (void*)fr
, sizeof(TypedValue
));
5173 inline void OPTBLD_INLINE
VMExecutionContext::iopSetG(PC
& pc
) {
5176 Cell
* fr
= m_stack
.topC();
5177 TypedValue
* tv2
= m_stack
.indTV(1);
5178 TypedValue
* to
= nullptr;
5179 lookupd_gbl(m_fp
, name
, tv2
, to
);
5180 assert(to
!= nullptr);
5182 memcpy((void*)tv2
, (void*)fr
, sizeof(TypedValue
));
5187 inline void OPTBLD_INLINE
VMExecutionContext::iopSetS(PC
& pc
) {
5189 TypedValue
* tv1
= m_stack
.topTV();
5190 TypedValue
* classref
= m_stack
.indTV(1);
5191 TypedValue
* propn
= m_stack
.indTV(2);
5192 TypedValue
* output
= propn
;
5195 bool visible
, accessible
;
5196 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
5197 if (!(visible
&& accessible
)) {
5198 raise_error("Invalid static property access: %s::%s",
5199 classref
->m_data
.pcls
->name()->data(),
5203 tvRefcountedDecRefCell(propn
);
5204 memcpy(output
, tv1
, sizeof(TypedValue
));
5205 m_stack
.ndiscard(2);
5209 inline void OPTBLD_INLINE
VMExecutionContext::iopSetM(PC
& pc
) {
5211 DECLARE_SETHELPER_ARGS
5212 if (!setHelperPre
<false, true, false, false, 1,
5213 LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5214 Cell
* c1
= m_stack
.topC();
5217 SetNewElem
<true>(base
, c1
);
5224 StringData
* result
= SetElem
<true>(base
, curMember
, c1
);
5226 tvRefcountedDecRefCell(c1
);
5227 c1
->m_type
= KindOfString
;
5228 c1
->m_data
.pstr
= result
;
5235 Class
* ctx
= arGetContextClass(m_fp
);
5236 SetProp
<true>(ctx
, base
, curMember
, c1
);
5239 default: assert(false);
5243 setHelperPost
<1>(SETHELPERPOST_ARGS
);
5246 inline void OPTBLD_INLINE
VMExecutionContext::iopSetWithRefLM(PC
& pc
) {
5248 DECLARE_SETHELPER_ARGS
5249 bool skip
= setHelperPre
<false, true, false, false, 0,
5250 ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
5253 TypedValue
* from
= frame_local(m_fp
, local
);
5254 tvAsVariant(base
) = withRefBind(tvAsVariant(from
));
5256 setHelperPost
<0>(SETHELPERPOST_ARGS
);
5259 inline void OPTBLD_INLINE
VMExecutionContext::iopSetWithRefRM(PC
& pc
) {
5261 DECLARE_SETHELPER_ARGS
5262 bool skip
= setHelperPre
<false, true, false, false, 1,
5263 ConsumeAll
>(MEMBERHELPERPRE_ARGS
);
5265 TypedValue
* from
= m_stack
.top();
5266 tvAsVariant(base
) = withRefBind(tvAsVariant(from
));
5268 setHelperPost
<0>(SETHELPERPOST_ARGS
);
5272 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpL(PC
& pc
) {
5275 DECODE(unsigned char, op
);
5276 Cell
* fr
= m_stack
.topC();
5277 TypedValue
* to
= frame_local(m_fp
, local
);
5278 SETOP_BODY(to
, op
, fr
);
5279 tvRefcountedDecRefCell(fr
);
5283 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpN(PC
& pc
) {
5285 DECODE(unsigned char, op
);
5287 Cell
* fr
= m_stack
.topC();
5288 TypedValue
* tv2
= m_stack
.indTV(1);
5289 TypedValue
* to
= nullptr;
5290 // XXX We're probably not getting warnings totally correct here
5291 lookupd_var(m_fp
, name
, tv2
, to
);
5292 assert(to
!= nullptr);
5293 SETOP_BODY(to
, op
, fr
);
5294 tvRefcountedDecRef(fr
);
5295 tvRefcountedDecRef(tv2
);
5296 tvReadCell(to
, tv2
);
5301 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpG(PC
& pc
) {
5303 DECODE(unsigned char, op
);
5305 Cell
* fr
= m_stack
.topC();
5306 TypedValue
* tv2
= m_stack
.indTV(1);
5307 TypedValue
* to
= nullptr;
5308 // XXX We're probably not getting warnings totally correct here
5309 lookupd_gbl(m_fp
, name
, tv2
, to
);
5310 assert(to
!= nullptr);
5311 SETOP_BODY(to
, op
, fr
);
5312 tvRefcountedDecRef(fr
);
5313 tvRefcountedDecRef(tv2
);
5314 tvReadCell(to
, tv2
);
5319 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpS(PC
& pc
) {
5321 DECODE(unsigned char, op
);
5322 Cell
* fr
= m_stack
.topC();
5323 TypedValue
* classref
= m_stack
.indTV(1);
5324 TypedValue
* propn
= m_stack
.indTV(2);
5325 TypedValue
* output
= propn
;
5328 bool visible
, accessible
;
5329 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
5330 if (!(visible
&& accessible
)) {
5331 raise_error("Invalid static property access: %s::%s",
5332 classref
->m_data
.pcls
->name()->data(),
5335 SETOP_BODY(val
, op
, fr
);
5336 tvRefcountedDecRefCell(propn
);
5337 tvRefcountedDecRef(fr
);
5338 tvReadCell(val
, output
);
5339 m_stack
.ndiscard(2);
5343 inline void OPTBLD_INLINE
VMExecutionContext::iopSetOpM(PC
& pc
) {
5345 DECODE(unsigned char, op
);
5346 DECLARE_SETHELPER_ARGS
5347 if (!setHelperPre
<MoreWarnings
, true, false, false, 1,
5348 LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5350 Cell
* rhs
= m_stack
.topC();
5353 result
= SetOpNewElem(tvScratch
, tvRef
, op
, base
, rhs
);
5360 result
= SetOpElem(tvScratch
, tvRef
, op
, base
, curMember
, rhs
);
5365 Class
*ctx
= arGetContextClass(m_fp
);
5366 result
= SetOpProp(tvScratch
, tvRef
, ctx
, op
, base
, curMember
, rhs
);
5371 result
= nullptr; // Silence compiler warning.
5375 tvRefcountedDecRef(rhs
);
5376 tvReadCell(result
, rhs
);
5378 setHelperPost
<1>(SETHELPERPOST_ARGS
);
5381 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecL(PC
& pc
) {
5384 DECODE(unsigned char, op
);
5385 TypedValue
* to
= m_stack
.allocTV();
5387 TypedValue
* fr
= frame_local(m_fp
, local
);
5388 IncDecBody
<true>(op
, fr
, to
);
5391 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecN(PC
& pc
) {
5393 DECODE(unsigned char, op
);
5395 TypedValue
* nameCell
= m_stack
.topTV();
5396 TypedValue
* local
= nullptr;
5397 // XXX We're probably not getting warnings totally correct here
5398 lookupd_var(m_fp
, name
, nameCell
, local
);
5399 assert(local
!= nullptr);
5400 IncDecBody
<true>(op
, local
, nameCell
);
5404 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecG(PC
& pc
) {
5406 DECODE(unsigned char, op
);
5408 TypedValue
* nameCell
= m_stack
.topTV();
5409 TypedValue
* gbl
= nullptr;
5410 // XXX We're probably not getting warnings totally correct here
5411 lookupd_gbl(m_fp
, name
, nameCell
, gbl
);
5412 assert(gbl
!= nullptr);
5413 IncDecBody
<true>(op
, gbl
, nameCell
);
5417 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecS(PC
& pc
) {
5420 DECODE(unsigned char, op
);
5421 if (!(visible
&& accessible
)) {
5422 raise_error("Invalid static property access: %s::%s",
5423 clsref
->m_data
.pcls
->name()->data(),
5426 tvRefcountedDecRefCell(nameCell
);
5427 IncDecBody
<true>(op
, val
, output
);
5432 inline void OPTBLD_INLINE
VMExecutionContext::iopIncDecM(PC
& pc
) {
5434 DECODE(unsigned char, op
);
5435 DECLARE_SETHELPER_ARGS
5438 if (!setHelperPre
<MoreWarnings
, true, false, false, 0,
5439 LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5441 IncDecNewElem
<true>(tvScratch
, tvRef
, op
, base
, to
);
5448 IncDecElem
<true>(tvScratch
, tvRef
, op
, base
, curMember
, to
);
5453 Class
* ctx
= arGetContextClass(m_fp
);
5454 IncDecProp
<true>(tvScratch
, tvRef
, ctx
, op
, base
, curMember
, to
);
5457 default: assert(false);
5461 setHelperPost
<0>(SETHELPERPOST_ARGS
);
5462 Cell
* c1
= m_stack
.allocC();
5463 memcpy(c1
, &to
, sizeof(TypedValue
));
5466 inline void OPTBLD_INLINE
VMExecutionContext::iopBindL(PC
& pc
) {
5469 Var
* fr
= m_stack
.topV();
5470 TypedValue
* to
= frame_local(m_fp
, local
);
5474 inline void OPTBLD_INLINE
VMExecutionContext::iopBindN(PC
& pc
) {
5477 TypedValue
* fr
= m_stack
.topTV();
5478 TypedValue
* nameTV
= m_stack
.indTV(1);
5479 TypedValue
* to
= nullptr;
5480 lookupd_var(m_fp
, name
, nameTV
, to
);
5481 assert(to
!= nullptr);
5483 memcpy((void*)nameTV
, (void*)fr
, sizeof(TypedValue
));
5488 inline void OPTBLD_INLINE
VMExecutionContext::iopBindG(PC
& pc
) {
5491 TypedValue
* fr
= m_stack
.topTV();
5492 TypedValue
* nameTV
= m_stack
.indTV(1);
5493 TypedValue
* to
= nullptr;
5494 lookupd_gbl(m_fp
, name
, nameTV
, to
);
5495 assert(to
!= nullptr);
5497 memcpy((void*)nameTV
, (void*)fr
, sizeof(TypedValue
));
5502 inline void OPTBLD_INLINE
VMExecutionContext::iopBindS(PC
& pc
) {
5504 TypedValue
* fr
= m_stack
.topTV();
5505 TypedValue
* classref
= m_stack
.indTV(1);
5506 TypedValue
* propn
= m_stack
.indTV(2);
5507 TypedValue
* output
= propn
;
5510 bool visible
, accessible
;
5511 lookup_sprop(m_fp
, classref
, name
, propn
, val
, visible
, accessible
);
5512 if (!(visible
&& accessible
)) {
5513 raise_error("Invalid static property access: %s::%s",
5514 classref
->m_data
.pcls
->name()->data(),
5518 tvRefcountedDecRefCell(propn
);
5519 memcpy(output
, fr
, sizeof(TypedValue
));
5520 m_stack
.ndiscard(2);
5524 inline void OPTBLD_INLINE
VMExecutionContext::iopBindM(PC
& pc
) {
5526 DECLARE_SETHELPER_ARGS
5527 TypedValue
* tv1
= m_stack
.topTV();
5528 if (!setHelperPre
<false, true, false, true, 1,
5529 ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
5530 // Bind the element/property with the var on the top of the stack
5533 setHelperPost
<1>(SETHELPERPOST_ARGS
);
5536 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetL(PC
& pc
) {
5539 assert(local
< m_fp
->m_func
->numLocals());
5540 TypedValue
* tv
= frame_local(m_fp
, local
);
5541 tvRefcountedDecRef(tv
);
5545 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetN(PC
& pc
) {
5548 TypedValue
* tv1
= m_stack
.topTV();
5549 TypedValue
* tv
= nullptr;
5550 lookup_var(m_fp
, name
, tv1
, tv
);
5551 assert(!m_fp
->hasInvName());
5552 if (tv
!= nullptr) {
5553 tvRefcountedDecRef(tv
);
5560 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetG(PC
& pc
) {
5562 TypedValue
* tv1
= m_stack
.topTV();
5563 StringData
* name
= lookup_name(tv1
);
5564 VarEnv
* varEnv
= m_globalVarEnv
;
5565 assert(varEnv
!= nullptr);
5566 varEnv
->unset(name
);
5571 inline void OPTBLD_INLINE
VMExecutionContext::iopUnsetM(PC
& pc
) {
5573 DECLARE_SETHELPER_ARGS
5574 if (!setHelperPre
<false, false, true, false, 0,
5575 LeaveLast
>(MEMBERHELPERPRE_ARGS
)) {
5581 UnsetElem(base
, curMember
);
5586 Class
* ctx
= arGetContextClass(m_fp
);
5587 UnsetProp(ctx
, base
, curMember
);
5590 default: assert(false);
5593 setHelperPost
<0>(SETHELPERPOST_ARGS
);
5596 inline ActRec
* OPTBLD_INLINE
VMExecutionContext::fPushFuncImpl(
5599 DEBUGGER_IF(phpBreakpointEnabled(func
->name()->data()));
5600 ActRec
* ar
= m_stack
.allocA();
5603 ar
->initNumArgs(numArgs
);
5604 ar
->setVarEnv(nullptr);
5608 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFunc(PC
& pc
) {
5610 DECODE_IVA(numArgs
);
5611 Cell
* c1
= m_stack
.topC();
5612 const Func
* func
= nullptr;
5613 ObjectData
* origObj
= nullptr;
5614 StringData
* origSd
= nullptr;
5615 if (IS_STRING_TYPE(c1
->m_type
)) {
5616 origSd
= c1
->m_data
.pstr
;
5617 func
= Unit::loadFunc(origSd
);
5618 } else if (c1
->m_type
== KindOfObject
) {
5619 static StringData
* invokeName
= StringData::GetStaticString("__invoke");
5620 origObj
= c1
->m_data
.pobj
;
5621 const Class
* cls
= origObj
->getVMClass();
5622 func
= cls
->lookupMethod(invokeName
);
5623 if (func
== nullptr) {
5624 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5627 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5629 if (func
== nullptr) {
5630 raise_error("Call to undefined function %s()", c1
->m_data
.pstr
->data());
5632 assert(!origObj
|| !origSd
);
5633 assert(origObj
|| origSd
);
5634 // We've already saved origObj or origSd; we'll use them after
5635 // overwriting the pointer on the stack. Don't refcount it now; defer
5636 // till after we're done with it.
5638 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5640 if (func
->attrs() & AttrStatic
&& !func
->isClosureBody()) {
5641 ar
->setClass(origObj
->getVMClass());
5644 ar
->setThis(origObj
);
5645 // Teleport the reference from the destroyed stack cell to the
5646 // ActRec. Don't try this at home.
5649 ar
->setThis(nullptr);
5654 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFuncD(PC
& pc
) {
5656 DECODE_IVA(numArgs
);
5658 const NamedEntityPair nep
= m_fp
->m_func
->unit()->lookupNamedEntityPairId(id
);
5659 Func
* func
= Unit::loadFunc(nep
.second
, nep
.first
);
5660 if (func
== nullptr) {
5661 raise_error("Call to undefined function %s()",
5662 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
5664 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5665 ar
->setThis(nullptr);
5668 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushFuncU(PC
& pc
) {
5670 DECODE_IVA(numArgs
);
5672 DECODE(Id
, globalFunc
);
5673 Unit
* unit
= m_fp
->m_func
->unit();
5674 const NamedEntityPair nep
= unit
->lookupNamedEntityPairId(nsFunc
);
5675 Func
* func
= Unit::loadFunc(nep
.second
, nep
.first
);
5676 if (func
== nullptr) {
5677 const NamedEntityPair nep2
= unit
->lookupNamedEntityPairId(globalFunc
);
5678 func
= Unit::loadFunc(nep2
.second
, nep2
.first
);
5679 if (func
== nullptr) {
5680 const char *funcName
= unit
->lookupLitstrId(nsFunc
)->data();
5681 raise_error("Call to undefined function %s()", funcName
);
5684 ActRec
* ar
= fPushFuncImpl(func
, numArgs
);
5685 ar
->setThis(nullptr);
5688 void VMExecutionContext::fPushObjMethodImpl(
5689 Class
* cls
, StringData
* name
, ObjectData
* obj
, int numArgs
) {
5691 LookupResult res
= lookupObjMethod(f
, cls
, name
, true);
5693 ActRec
* ar
= m_stack
.allocA();
5696 if (res
== MethodFoundNoThis
) {
5700 assert(res
== MethodFoundWithThis
|| res
== MagicCallFound
);
5701 /* Transfer ownership of obj to the ActRec*/
5704 ar
->initNumArgs(numArgs
);
5705 if (res
== MagicCallFound
) {
5706 ar
->setInvName(name
);
5708 ar
->setVarEnv(NULL
);
5713 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushObjMethod(PC
& pc
) {
5715 DECODE_IVA(numArgs
);
5716 Cell
* c1
= m_stack
.topC(); // Method name.
5717 if (!IS_STRING_TYPE(c1
->m_type
)) {
5718 raise_error(Strings::METHOD_NAME_MUST_BE_STRING
);
5720 Cell
* c2
= m_stack
.indC(1); // Object.
5721 if (c2
->m_type
!= KindOfObject
) {
5722 throw_call_non_object(c1
->m_data
.pstr
->data());
5724 ObjectData
* obj
= c2
->m_data
.pobj
;
5725 Class
* cls
= obj
->getVMClass();
5726 StringData
* name
= c1
->m_data
.pstr
;
5727 // We handle decReffing obj and name in fPushObjMethodImpl
5728 m_stack
.ndiscard(2);
5729 fPushObjMethodImpl(cls
, name
, obj
, numArgs
);
5732 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushObjMethodD(PC
& pc
) {
5734 DECODE_IVA(numArgs
);
5735 DECODE_LITSTR(name
);
5736 Cell
* c1
= m_stack
.topC();
5737 if (c1
->m_type
!= KindOfObject
) {
5738 throw_call_non_object(name
->data());
5740 ObjectData
* obj
= c1
->m_data
.pobj
;
5741 Class
* cls
= obj
->getVMClass();
5742 // We handle decReffing obj in fPushObjMethodImpl
5744 fPushObjMethodImpl(cls
, name
, obj
, numArgs
);
5747 template<bool forwarding
>
5748 void VMExecutionContext::pushClsMethodImpl(Class
* cls
,
5753 LookupResult res
= lookupClsMethod(f
, cls
, name
, obj
, true);
5754 if (res
== MethodFoundNoThis
|| res
== MagicCallStaticFound
) {
5758 assert(res
== MethodFoundWithThis
|| res
== MagicCallFound
);
5762 ActRec
* ar
= m_stack
.allocA();
5771 /* Propogate the current late bound class if there is one, */
5772 /* otherwise use the class given by this instruction's input */
5773 if (m_fp
->hasThis()) {
5774 cls
= m_fp
->getThis()->getVMClass();
5775 } else if (m_fp
->hasClass()) {
5776 cls
= m_fp
->getClass();
5781 ar
->initNumArgs(numArgs
);
5782 if (res
== MagicCallFound
|| res
== MagicCallStaticFound
) {
5783 ar
->setInvName(name
);
5785 ar
->setVarEnv(nullptr);
5786 decRefStr(const_cast<StringData
*>(name
));
5790 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethod(PC
& pc
) {
5792 DECODE_IVA(numArgs
);
5793 Cell
* c1
= m_stack
.indC(1); // Method name.
5794 if (!IS_STRING_TYPE(c1
->m_type
)) {
5795 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5797 TypedValue
* tv
= m_stack
.top();
5798 assert(tv
->m_type
== KindOfClass
);
5799 Class
* cls
= tv
->m_data
.pcls
;
5800 StringData
* name
= c1
->m_data
.pstr
;
5801 // CLSMETHOD_BODY will take care of decReffing name
5802 m_stack
.ndiscard(2);
5803 assert(cls
&& name
);
5804 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5805 pushClsMethodImpl
<false>(cls
, name
, obj
, numArgs
);
5808 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethodD(PC
& pc
) {
5810 DECODE_IVA(numArgs
);
5811 DECODE_LITSTR(name
);
5812 DECODE(Id
, classId
);
5813 const NamedEntityPair
&nep
=
5814 m_fp
->m_func
->unit()->lookupNamedEntityPairId(classId
);
5815 Class
* cls
= Unit::loadClass(nep
.second
, nep
.first
);
5816 if (cls
== nullptr) {
5817 raise_error(Strings::UNKNOWN_CLASS
, nep
.first
->data());
5819 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5820 pushClsMethodImpl
<false>(cls
, name
, obj
, numArgs
);
5823 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushClsMethodF(PC
& pc
) {
5825 DECODE_IVA(numArgs
);
5826 Cell
* c1
= m_stack
.indC(1); // Method name.
5827 if (!IS_STRING_TYPE(c1
->m_type
)) {
5828 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING
);
5830 TypedValue
* tv
= m_stack
.top();
5831 assert(tv
->m_type
== KindOfClass
);
5832 Class
* cls
= tv
->m_data
.pcls
;
5834 StringData
* name
= c1
->m_data
.pstr
;
5835 // CLSMETHOD_BODY will take care of decReffing name
5836 m_stack
.ndiscard(2);
5837 ObjectData
* obj
= m_fp
->hasThis() ? m_fp
->getThis() : nullptr;
5838 pushClsMethodImpl
<true>(cls
, name
, obj
, numArgs
);
5841 #undef CLSMETHOD_BODY
5843 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCtor(PC
& pc
) {
5845 DECODE_IVA(numArgs
);
5846 TypedValue
* tv
= m_stack
.topTV();
5847 assert(tv
->m_type
== KindOfClass
);
5848 Class
* cls
= tv
->m_data
.pcls
;
5849 assert(cls
!= nullptr);
5852 LookupResult res UNUSED
= lookupCtorMethod(f
, cls
, true);
5853 assert(res
== MethodFoundWithThis
);
5854 // Replace input with uninitialized instance.
5855 ObjectData
* this_
= newInstance(cls
);
5856 TRACE(2, "FPushCtor: just new'ed an instance of class %s: %p\n",
5857 cls
->name()->data(), this_
);
5858 this_
->incRefCount();
5859 this_
->incRefCount();
5860 tv
->m_type
= KindOfObject
;
5861 tv
->m_data
.pobj
= this_
;
5862 // Push new activation record.
5863 ActRec
* ar
= m_stack
.allocA();
5867 ar
->initNumArgs(numArgs
, true /* isFPushCtor */);
5869 ar
->setVarEnv(nullptr);
5872 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCtorD(PC
& pc
) {
5874 DECODE_IVA(numArgs
);
5876 const NamedEntityPair
&nep
=
5877 m_fp
->m_func
->unit()->lookupNamedEntityPairId(id
);
5878 Class
* cls
= Unit::loadClass(nep
.second
, nep
.first
);
5879 if (cls
== nullptr) {
5880 raise_error(Strings::UNKNOWN_CLASS
,
5881 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
5885 LookupResult res UNUSED
= lookupCtorMethod(f
, cls
, true);
5886 assert(res
== MethodFoundWithThis
);
5887 // Push uninitialized instance.
5888 ObjectData
* this_
= newInstance(cls
);
5889 TRACE(2, "FPushCtorD: new'ed an instance of class %s: %p\n",
5890 cls
->name()->data(), this_
);
5891 this_
->incRefCount();
5892 m_stack
.pushObject(this_
);
5893 // Push new activation record.
5894 ActRec
* ar
= m_stack
.allocA();
5898 ar
->initNumArgs(numArgs
, true /* isFPushCtor */);
5899 ar
->setVarEnv(nullptr);
5902 inline void OPTBLD_INLINE
VMExecutionContext::iopDecodeCufIter(PC
& pc
) {
5906 DECODE(Offset
, offset
);
5908 Iter
* it
= frame_iter(m_fp
, itId
);
5909 CufIter
&cit
= it
->cuf();
5911 ObjectData
* obj
= nullptr;
5912 HPHP::Class
* cls
= nullptr;
5913 StringData
* invName
= nullptr;
5914 TypedValue
*func
= m_stack
.topTV();
5917 if (m_fp
->m_func
->isBuiltin()) {
5918 ar
= getOuterVMFrame(ar
);
5920 const Func
* f
= vm_decode_function(tvAsVariant(func
),
5926 pc
= origPc
+ offset
;
5935 cit
.setName(invName
);
5940 inline void OPTBLD_INLINE
VMExecutionContext::iopFPushCufIter(PC
& pc
) {
5942 DECODE_IVA(numArgs
);
5945 Iter
* it
= frame_iter(m_fp
, itId
);
5947 auto f
= it
->cuf().func();
5948 auto o
= it
->cuf().ctx();
5949 auto n
= it
->cuf().name();
5951 ActRec
* ar
= m_stack
.allocA();
5954 ar
->m_this
= (ObjectData
*)o
;
5955 if (o
&& !(uintptr_t(o
) & 1)) ar
->m_this
->incRefCount();
5960 ar
->setVarEnv(nullptr);
5962 ar
->initNumArgs(numArgs
, false /* isFPushCtor */);
5965 inline void OPTBLD_INLINE
VMExecutionContext::doFPushCuf(PC
& pc
,
5969 DECODE_IVA(numArgs
);
5971 TypedValue func
= m_stack
.topTV()[safe
];
5973 ObjectData
* obj
= nullptr;
5974 HPHP::Class
* cls
= nullptr;
5975 StringData
* invName
= nullptr;
5977 const Func
* f
= vm_decode_function(tvAsVariant(&func
), getFP(),
5982 if (safe
) m_stack
.topTV()[1] = m_stack
.topTV()[0];
5983 m_stack
.ndiscard(1);
5985 f
= SystemLib::s_nullFunc
;
5987 m_stack
.pushFalse();
5993 ActRec
* ar
= m_stack
.allocA();
6002 ar
->setThis(nullptr);
6004 ar
->initNumArgs(numArgs
, false /* isFPushCtor */);
6006 ar
->setInvName(invName
);
6008 ar
->setVarEnv(nullptr);
6010 tvRefcountedDecRef(&func
);
// FPushCuf: push an ActRec for a call_user_func-style callee taken from the
// stack — plain variant (no arg forwarding, no safe mode).
inline void OPTBLD_INLINE VMExecutionContext::iopFPushCuf(PC& pc) {
  doFPushCuf(pc, false, false);
}
// FPushCufF: as FPushCuf, but with the "forward" flag set (second argument
// of doFPushCuf), no safe mode.
inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufF(PC& pc) {
  doFPushCuf(pc, true, false);
}
// FPushCufSafe: as FPushCuf, but with the "safe" flag set (third argument
// of doFPushCuf) — callers get a success flag instead of a hard failure.
inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufSafe(PC& pc) {
  doFPushCuf(pc, false, true);
}
6025 static inline ActRec
* arFromInstr(TypedValue
* sp
, const Opcode
* pc
) {
6026 return arFromSpOffset((ActRec
*)sp
, instrSpToArDelta(pc
));
6029 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassC(PC
& pc
) {
6031 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6034 DECODE_IVA(paramId
);
6036 assert(paramId
< ar
->numArgs());
6040 #define FPASSC_CHECKED_PRELUDE \
6041 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc); \
6043 DECODE_IVA(paramId); \
6044 assert(paramId < ar->numArgs()); \
6045 const Func* func = ar->m_func;
6047 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassCW(PC
& pc
) {
6048 FPASSC_CHECKED_PRELUDE
6049 if (func
->mustBeRef(paramId
)) {
6050 TRACE(1, "FPassCW: function %s(%d) param %d is by reference, "
6051 "raising a strict warning (attr:0x%x)\n",
6052 func
->name()->data(), func
->numParams(), paramId
,
6053 func
->info() ? func
->info()->attribute
: 0);
6054 raise_strict_warning("Only variables should be passed by reference");
6058 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassCE(PC
& pc
) {
6059 FPASSC_CHECKED_PRELUDE
6060 if (func
->mustBeRef(paramId
)) {
6061 TRACE(1, "FPassCE: function %s(%d) param %d is by reference, "
6062 "throwing a fatal error (attr:0x%x)\n",
6063 func
->name()->data(), func
->numParams(), paramId
,
6064 func
->info() ? func
->info()->attribute
: 0);
6065 raise_error("Cannot pass parameter %d by reference", paramId
+1);
6069 #undef FPASSC_CHECKED_PRELUDE
6071 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassV(PC
& pc
) {
6072 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6074 DECODE_IVA(paramId
);
6075 assert(paramId
< ar
->numArgs());
6076 const Func
* func
= ar
->m_func
;
6077 if (!func
->byRef(paramId
)) {
6082 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassR(PC
& pc
) {
6083 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6085 DECODE_IVA(paramId
);
6086 assert(paramId
< ar
->numArgs());
6087 const Func
* func
= ar
->m_func
;
6088 if (func
->byRef(paramId
)) {
6089 TypedValue
* tv
= m_stack
.topTV();
6090 if (tv
->m_type
!= KindOfRef
) {
6094 if (m_stack
.topTV()->m_type
== KindOfRef
) {
6100 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassL(PC
& pc
) {
6101 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6103 DECODE_IVA(paramId
);
6105 assert(paramId
< ar
->numArgs());
6106 TypedValue
* fr
= frame_local(m_fp
, local
);
6107 TypedValue
* to
= m_stack
.allocTV();
6108 if (!ar
->m_func
->byRef(paramId
)) {
6109 cgetl_body(m_fp
, fr
, to
, local
);
6115 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassN(PC
& pc
) {
6116 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6119 DECODE_IVA(paramId
);
6120 assert(paramId
< ar
->numArgs());
6121 if (!ar
->m_func
->byRef(paramId
)) {
6128 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassG(PC
& pc
) {
6129 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6132 DECODE_IVA(paramId
);
6133 assert(paramId
< ar
->numArgs());
6134 if (!ar
->m_func
->byRef(paramId
)) {
6141 inline void OPTBLD_INLINE
VMExecutionContext::iopFPassS(PC
& pc
) {
6142 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6145 DECODE_IVA(paramId
);
6146 assert(paramId
< ar
->numArgs());
6147 if (!ar
->m_func
->byRef(paramId
)) {
6154 void VMExecutionContext::iopFPassM(PC
& pc
) {
6155 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6157 DECODE_IVA(paramId
);
6158 assert(paramId
< ar
->numArgs());
6159 if (!ar
->m_func
->byRef(paramId
)) {
6160 DECLARE_GETHELPER_ARGS
6161 getHelper(GETHELPER_ARGS
);
6162 if (tvRet
->m_type
== KindOfRef
) {
6166 DECLARE_SETHELPER_ARGS
6167 TypedValue
* tv1
= m_stack
.allocTV();
6169 if (!setHelperPre
<false, true, false, true, 1,
6170 ConsumeAll
>(MEMBERHELPERPRE_ARGS
)) {
6171 if (base
->m_type
!= KindOfRef
) {
6174 tvDupVar(base
, tv1
);
6179 setHelperPost
<1>(SETHELPERPOST_ARGS
);
6183 void VMExecutionContext::doFCall(ActRec
* ar
, PC
& pc
) {
6184 assert(getOuterVMFrame(ar
) == m_fp
);
6185 ar
->m_savedRip
= (uintptr_t)tx64
->getRetFromInterpretedFrame();
6186 assert(isReturnHelper(ar
->m_savedRip
));
6187 TRACE(3, "FCall: pc %p func %p base %d\n", m_pc
,
6188 m_fp
->m_func
->unit()->entry(),
6189 int(m_fp
->m_func
->base()));
6190 ar
->m_soff
= m_fp
->m_func
->unit()->offsetOf(pc
)
6191 - (uintptr_t)m_fp
->m_func
->base();
6192 assert(pcOff() >= m_fp
->m_func
->base());
6193 prepareFuncEntry(ar
, pc
);
6195 if (!EventHook::FunctionEnter(ar
, EventHook::NormalFunc
)) {
6200 inline void OPTBLD_INLINE
VMExecutionContext::iopFCall(PC
& pc
) {
6201 ActRec
* ar
= arFromInstr(m_stack
.top(), (Opcode
*)pc
);
6203 DECODE_IVA(numArgs
);
6204 assert(numArgs
== ar
->numArgs());
6205 checkStack(m_stack
, ar
->m_func
);
// Return a function pointer type for calling a builtin with a given
// return value and args.
template<class Ret, class... Args> struct NativeFunction {
  using type = Ret (*)(Args...);
};
// Recursively pack all parameters up to call a native builtin.
//
// NativeFuncCaller<Ret,NArgs,CurArg> peels one TypedValue off the argument
// area per instantiation (note `tvs - 1`: tvs walks downward), appending it
// to the C++ argument pack in whichever representation the declared builtin
// parameter type expects, until the NArgs == 0 specialization below performs
// the actual native call.
template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller;
template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller {
  template<class... Args>
  static Ret call(const Func* func, TypedValue* tvs, Args... args) {
    // Next instantiation: one fewer value left to pack, next declared param.
    typedef NativeFuncCaller<Ret,NArgs - 1,CurArg + 1> NextArgT;
    DataType type = func->params()[CurArg].builtinType();
    if (type == KindOfDouble) {
      // pass TV.m_data.dbl by value with C++ calling convention for doubles
      return NextArgT::call(func, tvs - 1, args..., tvs->m_data.dbl);
    }
    if (type == KindOfInt64 || type == KindOfBoolean) {
      // pass TV.m_data.num by value
      return NextArgT::call(func, tvs - 1, args..., tvs->m_data.num);
    }
    if (IS_STRING_TYPE(type) || type == KindOfArray || type == KindOfObject) {
      // pass ptr to TV.m_data for String&, Array&, or Object&
      return NextArgT::call(func, tvs - 1, args..., &tvs->m_data);
    }
    // final case is for passing full value as Variant&
    return NextArgT::call(func, tvs - 1, args..., tvs);
  }
};
// Base case of the recursion: every parameter has been packed into args...,
// so cast the builtin's native entry point to the matching signature (built
// via NativeFunction) and invoke it. The TypedValue* cursor is no longer
// needed and is left unnamed.
template<class Ret, size_t CurArg> struct NativeFuncCaller<Ret,0,CurArg> {
  template<class... Args>
  static Ret call(const Func* f, TypedValue*, Args... args) {
    typedef typename NativeFunction<Ret,Args...>::type FuncType;
    return reinterpret_cast<FuncType>(f->nativeFuncPtr())(args...);
  }
};
6247 static Ret
makeNativeCall(const Func
* f
, TypedValue
* args
, size_t numArgs
) {
6248 static_assert(kMaxBuiltinArgs
== 5,
6249 "makeNativeCall needs updates for kMaxBuiltinArgs");
6251 case 0: return NativeFuncCaller
<Ret
,0,0>::call(f
, args
);
6252 case 1: return NativeFuncCaller
<Ret
,1,0>::call(f
, args
);
6253 case 2: return NativeFuncCaller
<Ret
,2,0>::call(f
, args
);
6254 case 3: return NativeFuncCaller
<Ret
,3,0>::call(f
, args
);
6255 case 4: return NativeFuncCaller
<Ret
,4,0>::call(f
, args
);
6256 case 5: return NativeFuncCaller
<Ret
,5,0>::call(f
, args
);
6257 default: assert(false);
6263 static int makeNativeRefCall(const Func
* f
, Ret
* ret
,
6264 TypedValue
* args
, size_t numArgs
) {
6266 case 0: return NativeFuncCaller
<int64_t,0,0>::call(f
, args
, ret
);
6267 case 1: return NativeFuncCaller
<int64_t,1,0>::call(f
, args
, ret
);
6268 case 2: return NativeFuncCaller
<int64_t,2,0>::call(f
, args
, ret
);
6269 case 3: return NativeFuncCaller
<int64_t,3,0>::call(f
, args
, ret
);
6270 case 4: return NativeFuncCaller
<int64_t,4,0>::call(f
, args
, ret
);
6271 case 5: return NativeFuncCaller
<int64_t,5,0>::call(f
, args
, ret
);
6272 default: assert(false);
6277 inline void OPTBLD_INLINE
VMExecutionContext::iopFCallBuiltin(PC
& pc
) {
6279 DECODE_IVA(numArgs
);
6280 DECODE_IVA(numNonDefault
);
6282 const NamedEntity
* ne
= m_fp
->m_func
->unit()->lookupNamedEntityId(id
);
6283 Func
* func
= Unit::lookupFunc(ne
);
6284 if (func
== nullptr) {
6285 raise_error("Undefined function: %s",
6286 m_fp
->m_func
->unit()->lookupLitstrId(id
)->data());
6288 TypedValue
* args
= m_stack
.indTV(numArgs
-1);
6289 assert(numArgs
== func
->numParams());
6290 for (int i
= 0; i
< numNonDefault
; i
++) {
6291 const Func::ParamInfo
& pi
= func
->params()[i
];
6293 #define CASE(kind) case KindOf ## kind : do { \
6294 tvCastTo ## kind ## InPlace(&args[-i]); break; \
6297 switch (pi
.builtinType()) {
6313 ret
.m_type
= func
->returnType();
6314 switch (func
->returnType()) {
6316 ret
.m_data
.num
= makeNativeCall
<bool>(func
, args
, numArgs
);
6318 case KindOfNull
: /* void return type */
6320 ret
.m_data
.num
= makeNativeCall
<int64_t>(func
, args
, numArgs
);
6323 case KindOfStaticString
:
6326 makeNativeRefCall(func
, &ret
.m_data
, args
, numArgs
);
6327 if (ret
.m_data
.num
== 0) {
6328 ret
.m_type
= KindOfNull
;
6332 makeNativeRefCall(func
, &ret
, args
, numArgs
);
6333 if (ret
.m_type
== KindOfUninit
) {
6334 ret
.m_type
= KindOfNull
;
6341 frame_free_args(args
, numNonDefault
);
6342 m_stack
.ndiscard(numArgs
- 1);
6344 memcpy(m_stack
.top(), &ret
, sizeof(TypedValue
));
6347 bool VMExecutionContext::prepareArrayArgs(ActRec
* ar
,
6349 if (UNLIKELY(ar
->hasInvName())) {
6350 m_stack
.pushStringNoRc(ar
->getInvName());
6351 m_stack
.pushArray(args
);
6357 int nargs
= args
->size();
6358 const Func
* f
= ar
->m_func
;
6359 int nparams
= f
->numParams();
6360 int extra
= nargs
- nparams
;
6365 ssize_t pos
= args
->iter_begin();
6366 for (int i
= 0; i
< nparams
; ++i
) {
6367 TypedValue
* from
= const_cast<TypedValue
*>(
6368 args
->getValueRef(pos
).asTypedValue());
6369 TypedValue
* to
= m_stack
.allocTV();
6370 if (UNLIKELY(f
->byRef(i
))) {
6371 if (UNLIKELY(!tvAsVariant(from
).isReferenced())) {
6372 raise_warning("Parameter %d to %s() expected to be a reference, "
6373 "value given", i
+ 1, f
->fullName()->data());
6374 if (skipCufOnInvalidParams
) {
6376 while (i
--) m_stack
.popTV();
6385 if (UNLIKELY(to
->m_type
== KindOfRef
)) {
6389 pos
= args
->iter_advance(pos
);
6391 if (extra
&& (ar
->m_func
->attrs() & AttrMayUseVV
)) {
6392 ExtraArgs
* extraArgs
= ExtraArgs::allocateUninit(extra
);
6393 for (int i
= 0; i
< extra
; ++i
) {
6394 TypedValue
* to
= extraArgs
->getExtraArg(i
);
6395 tvDup(args
->getValueRef(pos
).asTypedValue(), to
);
6396 if (to
->m_type
== KindOfRef
&& to
->m_data
.pref
->_count
== 2) {
6399 pos
= args
->iter_advance(pos
);
6401 ar
->setExtraArgs(extraArgs
);
6402 ar
->initNumArgs(nargs
);
6404 ar
->initNumArgs(nparams
);
6410 static void cleanupParamsAndActRec(Stack
& stack
,
6412 ExtraArgs
* extraArgs
) {
6413 assert(stack
.top() + (extraArgs
?
6414 ar
->m_func
->numParams() :
6415 ar
->numArgs()) == (void*)ar
);
6417 const int numExtra
= ar
->numArgs() - ar
->m_func
->numParams();
6418 ExtraArgs::deallocate(extraArgs
, numExtra
);
6420 while (stack
.top() != (void*)ar
) {
6426 bool VMExecutionContext::doFCallArray(PC
& pc
) {
6427 ActRec
* ar
= (ActRec
*)(m_stack
.top() + 1);
6428 assert(ar
->numArgs() == 1);
6430 Cell
* c1
= m_stack
.topC();
6431 if (skipCufOnInvalidParams
&& UNLIKELY(c1
->m_type
!= KindOfArray
)) {
6433 // this is what we /should/ do, but our code base depends
6434 // on the broken behavior of casting the second arg to an
6436 cleanupParamsAndActRec(m_stack
, ar
, nullptr);
6438 raise_warning("call_user_func_array() expects parameter 2 to be array");
6442 const Func
* func
= ar
->m_func
;
6444 Array
args(LIKELY(c1
->m_type
== KindOfArray
) ? c1
->m_data
.parr
:
6445 tvAsVariant(c1
).toArray().get());
6447 checkStack(m_stack
, func
);
6449 assert(ar
->m_savedRbp
== (uint64_t)m_fp
);
6450 assert(!ar
->m_func
->isGenerator());
6451 ar
->m_savedRip
= (uintptr_t)tx64
->getRetFromInterpretedFrame();
6452 assert(isReturnHelper(ar
->m_savedRip
));
6453 TRACE(3, "FCallArray: pc %p func %p base %d\n", m_pc
,
6454 m_fp
->m_func
->unit()->entry(),
6455 int(m_fp
->m_func
->base()));
6456 ar
->m_soff
= m_fp
->m_func
->unit()->offsetOf(pc
)
6457 - (uintptr_t)m_fp
->m_func
->base();
6458 assert(pcOff() > m_fp
->m_func
->base());
6460 if (UNLIKELY(!prepareArrayArgs(ar
, args
.get()))) return false;
6463 if (UNLIKELY(!(prepareFuncEntry(ar
, pc
)))) {
6467 if (UNLIKELY(!EventHook::FunctionEnter(ar
, EventHook::NormalFunc
))) {
6474 inline void OPTBLD_INLINE
VMExecutionContext::iopFCallArray(PC
& pc
) {
6476 (void)doFCallArray(pc
);
6479 inline void OPTBLD_INLINE
VMExecutionContext::iopCufSafeArray(PC
& pc
) {
6482 ret
.append(tvAsVariant(m_stack
.top() + 1));
6483 ret
.appendWithRef(tvAsVariant(m_stack
.top() + 0));
6486 tvAsVariant(m_stack
.top()) = ret
;
6489 inline void OPTBLD_INLINE
VMExecutionContext::iopCufSafeReturn(PC
& pc
) {
6491 bool ok
= tvAsVariant(m_stack
.top() + 1).toBoolean();
6492 tvRefcountedDecRef(m_stack
.top() + 1);
6493 tvRefcountedDecRef(m_stack
.top() + (ok
? 2 : 0));
6494 if (ok
) m_stack
.top()[2] = m_stack
.top()[0];
6495 m_stack
.ndiscard(2);
6498 inline bool VMExecutionContext::initIterator(PC
& pc
, PC
& origPc
, Iter
* it
,
6499 Offset offset
, Cell
* c1
) {
6500 bool hasElems
= it
->init(c1
);
6508 inline void OPTBLD_INLINE
VMExecutionContext::iopIterInit(PC
& pc
) {
6512 DECODE(Offset
, offset
);
6514 Cell
* c1
= m_stack
.topC();
6515 Iter
* it
= frame_iter(m_fp
, itId
);
6516 TypedValue
* tv1
= frame_local(m_fp
, val
);
6517 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6518 tvAsVariant(tv1
) = it
->arr().second();
6522 inline void OPTBLD_INLINE
VMExecutionContext::iopIterInitK(PC
& pc
) {
6526 DECODE(Offset
, offset
);
6529 Cell
* c1
= m_stack
.topC();
6530 Iter
* it
= frame_iter(m_fp
, itId
);
6531 TypedValue
* tv1
= frame_local(m_fp
, val
);
6532 TypedValue
* tv2
= frame_local(m_fp
, key
);
6533 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6534 tvAsVariant(tv1
) = it
->arr().second();
6535 tvAsVariant(tv2
) = it
->arr().first();
6539 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterInit(PC
& pc
) {
6543 DECODE(Offset
, offset
);
6545 Cell
* c1
= m_stack
.topC();
6546 Iter
* it
= frame_iter(m_fp
, itId
);
6547 TypedValue
* tv1
= frame_local(m_fp
, val
);
6548 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6549 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6553 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterInitK(PC
& pc
) {
6557 DECODE(Offset
, offset
);
6560 Cell
* c1
= m_stack
.topC();
6561 Iter
* it
= frame_iter(m_fp
, itId
);
6562 TypedValue
* tv1
= frame_local(m_fp
, val
);
6563 TypedValue
* tv2
= frame_local(m_fp
, key
);
6564 if (initIterator(pc
, origPc
, it
, offset
, c1
)) {
6565 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6566 tvAsVariant(tv2
) = it
->arr().first();
6570 inline bool VMExecutionContext::initIteratorM(PC
& pc
, PC
& origPc
, Iter
* it
,
6571 Offset offset
, Var
* v1
) {
6572 bool hasElems
= it
->minit(v1
);
6580 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterInit(PC
& pc
) {
6584 DECODE(Offset
, offset
);
6586 Var
* v1
= m_stack
.topV();
6587 assert(v1
->m_type
== KindOfRef
);
6588 Iter
* it
= frame_iter(m_fp
, itId
);
6589 TypedValue
* tv1
= frame_local(m_fp
, val
);
6590 if (initIteratorM(pc
, origPc
, it
, offset
, v1
)) {
6591 tvAsVariant(tv1
).assignRef(it
->marr().val());
6595 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterInitK(PC
& pc
) {
6599 DECODE(Offset
, offset
);
6602 Var
* v1
= m_stack
.topV();
6603 assert(v1
->m_type
== KindOfRef
);
6604 Iter
* it
= frame_iter(m_fp
, itId
);
6605 TypedValue
* tv1
= frame_local(m_fp
, val
);
6606 TypedValue
* tv2
= frame_local(m_fp
, key
);
6607 if (initIteratorM(pc
, origPc
, it
, offset
, v1
)) {
6608 tvAsVariant(tv1
).assignRef(it
->marr().val());
6609 tvAsVariant(tv2
) = it
->marr().key();
6613 inline void OPTBLD_INLINE
VMExecutionContext::iopIterNext(PC
& pc
) {
6617 DECODE(Offset
, offset
);
6619 Iter
* it
= frame_iter(m_fp
, itId
);
6620 TypedValue
* tv1
= frame_local(m_fp
, val
);
6623 tvAsVariant(tv1
) = it
->arr().second();
6627 inline void OPTBLD_INLINE
VMExecutionContext::iopIterNextK(PC
& pc
) {
6631 DECODE(Offset
, offset
);
6634 Iter
* it
= frame_iter(m_fp
, itId
);
6635 TypedValue
* tv1
= frame_local(m_fp
, val
);
6636 TypedValue
* tv2
= frame_local(m_fp
, key
);
6639 tvAsVariant(tv1
) = it
->arr().second();
6640 tvAsVariant(tv2
) = it
->arr().first();
6644 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterNext(PC
& pc
) {
6648 DECODE(Offset
, offset
);
6650 Iter
* it
= frame_iter(m_fp
, itId
);
6651 TypedValue
* tv1
= frame_local(m_fp
, val
);
6654 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6658 inline void OPTBLD_INLINE
VMExecutionContext::iopWIterNextK(PC
& pc
) {
6662 DECODE(Offset
, offset
);
6665 Iter
* it
= frame_iter(m_fp
, itId
);
6666 TypedValue
* tv1
= frame_local(m_fp
, val
);
6667 TypedValue
* tv2
= frame_local(m_fp
, key
);
6670 tvAsVariant(tv1
) = withRefBind(it
->arr().secondRef());
6671 tvAsVariant(tv2
) = it
->arr().first();
6675 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterNext(PC
& pc
) {
6679 DECODE(Offset
, offset
);
6681 Iter
* it
= frame_iter(m_fp
, itId
);
6682 TypedValue
* tv1
= frame_local(m_fp
, val
);
6685 tvAsVariant(tv1
).assignRef(it
->marr().val());
6689 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterNextK(PC
& pc
) {
6693 DECODE(Offset
, offset
);
6696 Iter
* it
= frame_iter(m_fp
, itId
);
6697 TypedValue
* tv1
= frame_local(m_fp
, val
);
6698 TypedValue
* tv2
= frame_local(m_fp
, key
);
6701 tvAsVariant(tv1
).assignRef(it
->marr().val());
6702 tvAsVariant(tv2
) = it
->marr().key();
6706 inline void OPTBLD_INLINE
VMExecutionContext::iopIterFree(PC
& pc
) {
6709 Iter
* it
= frame_iter(m_fp
, itId
);
6713 inline void OPTBLD_INLINE
VMExecutionContext::iopMIterFree(PC
& pc
) {
6716 Iter
* it
= frame_iter(m_fp
, itId
);
6720 inline void OPTBLD_INLINE
VMExecutionContext::iopCIterFree(PC
& pc
) {
6723 Iter
* it
= frame_iter(m_fp
, itId
);
6727 inline void OPTBLD_INLINE
inclOp(VMExecutionContext
*ec
, PC
&pc
,
6728 InclOpFlags flags
) {
6730 Cell
* c1
= ec
->m_stack
.topC();
6731 String
path(prepareKey(c1
));
6733 TRACE(2, "inclOp %s %s %s %s %s \"%s\"\n",
6734 flags
& InclOpOnce
? "Once" : "",
6735 flags
& InclOpDocRoot
? "DocRoot" : "",
6736 flags
& InclOpRelative
? "Relative" : "",
6737 flags
& InclOpLocal
? "Local" : "",
6738 flags
& InclOpFatal
? "Fatal" : "",
6741 Unit
* u
= flags
& (InclOpDocRoot
|InclOpRelative
) ?
6742 ec
->evalIncludeRoot(path
.get(), flags
, &initial
) :
6743 ec
->evalInclude(path
.get(), ec
->m_fp
->m_func
->unit()->filepath(), &initial
);
6746 ((flags
& InclOpFatal
) ?
6747 (void (*)(const char *, ...))raise_error
:
6748 (void (*)(const char *, ...))raise_warning
)("File not found: %s",
6750 ec
->m_stack
.pushFalse();
6752 if (!(flags
& InclOpOnce
) || initial
) {
6753 ec
->evalUnit(u
, (flags
& InclOpLocal
), pc
, EventHook::PseudoMain
);
6755 Stats::inc(Stats::PseudoMain_Guarded
);
6756 ec
->m_stack
.pushTrue();
// Incl: PHP `include` — evaluate the named file with default flags
// (non-fatal on missing file, not once-only).
inline void OPTBLD_INLINE VMExecutionContext::iopIncl(PC& pc) {
  inclOp(this, pc, InclOpDefault);
}
// InclOnce: PHP `include_once` — as Incl, but skipped if the file was
// already included (InclOpOnce).
inline void OPTBLD_INLINE VMExecutionContext::iopInclOnce(PC& pc) {
  inclOp(this, pc, InclOpOnce);
}
// Req: PHP `require` — as Incl, but a missing file raises a fatal error
// (InclOpFatal) rather than a warning.
inline void OPTBLD_INLINE VMExecutionContext::iopReq(PC& pc) {
  inclOp(this, pc, InclOpFatal);
}
// ReqOnce: PHP `require_once` — fatal on a missing file and skipped if it
// was already included.
inline void OPTBLD_INLINE VMExecutionContext::iopReqOnce(PC& pc) {
  inclOp(this, pc, InclOpFatal | InclOpOnce);
}
// ReqDoc: require_once with the path resolved relative to the document
// root (InclOpDocRoot routes inclOp through evalIncludeRoot).
inline void OPTBLD_INLINE VMExecutionContext::iopReqDoc(PC& pc) {
  inclOp(this, pc, InclOpFatal | InclOpOnce | InclOpDocRoot);
}
6781 inline void OPTBLD_INLINE
VMExecutionContext::iopEval(PC
& pc
) {
6783 Cell
* c1
= m_stack
.topC();
6784 String
code(prepareKey(c1
));
6785 String prefixedCode
= concat("<?php ", code
);
6786 Unit
* unit
= compileEvalString(prefixedCode
.get());
6787 if (unit
== nullptr) {
6788 raise_error("Syntax error in eval()");
6791 evalUnit(unit
, false, pc
, EventHook::Eval
);
6794 inline void OPTBLD_INLINE
VMExecutionContext::iopDefFunc(PC
& pc
) {
6797 Func
* f
= m_fp
->m_func
->unit()->lookupFuncId(fid
);
6801 inline void OPTBLD_INLINE
VMExecutionContext::iopDefCls(PC
& pc
) {
6804 PreClass
* c
= m_fp
->m_func
->unit()->lookupPreClassId(cid
);
6808 inline void OPTBLD_INLINE
VMExecutionContext::iopDefTypedef(PC
& pc
) {
6811 m_fp
->m_func
->unit()->defTypedef(tid
);
6814 static inline void checkThis(ActRec
* fp
) {
6815 if (!fp
->hasThis()) {
6816 raise_error(Strings::FATAL_NULL_THIS
);
6820 inline void OPTBLD_INLINE
VMExecutionContext::iopThis(PC
& pc
) {
6823 ObjectData
* this_
= m_fp
->getThis();
6824 m_stack
.pushObject(this_
);
6827 inline void OPTBLD_INLINE
VMExecutionContext::iopBareThis(PC
& pc
) {
6829 DECODE(unsigned char, notice
);
6830 if (m_fp
->hasThis()) {
6831 ObjectData
* this_
= m_fp
->getThis();
6832 m_stack
.pushObject(this_
);
6835 if (notice
) raise_notice(Strings::WARN_NULL_THIS
);
6839 inline void OPTBLD_INLINE
VMExecutionContext::iopCheckThis(PC
& pc
) {
6844 inline void OPTBLD_INLINE
VMExecutionContext::iopInitThisLoc(PC
& pc
) {
6847 TypedValue
* thisLoc
= frame_local(m_fp
, id
);
6848 tvRefcountedDecRef(thisLoc
);
6849 if (m_fp
->hasThis()) {
6850 thisLoc
->m_data
.pobj
= m_fp
->getThis();
6851 thisLoc
->m_type
= KindOfObject
;
6854 tvWriteUninit(thisLoc
);
6859 * Helper for StaticLoc and StaticLocInit.
6862 lookupStatic(StringData
* name
,
6864 TypedValue
*&val
, bool& inited
) {
6865 HphpArray
* map
= get_static_locals(fp
);
6866 assert(map
!= nullptr);
6867 val
= map
->nvGet(name
);
6868 if (val
== nullptr) {
6871 map
->nvSet(name
, &tv
, false);
6872 val
= map
->nvGet(name
);
6879 inline void OPTBLD_INLINE
VMExecutionContext::iopStaticLoc(PC
& pc
) {
6881 DECODE_IVA(localId
);
6883 TypedValue
* fr
= nullptr;
6885 lookupStatic(var
, m_fp
, fr
, inited
);
6886 assert(fr
!= nullptr);
6887 if (fr
->m_type
!= KindOfRef
) {
6891 TypedValue
* tvLocal
= frame_local(m_fp
, localId
);
6892 tvBind(fr
, tvLocal
);
6896 m_stack
.pushFalse();
6900 inline void OPTBLD_INLINE
VMExecutionContext::iopStaticLocInit(PC
& pc
) {
6902 DECODE_IVA(localId
);
6904 TypedValue
* fr
= nullptr;
6906 lookupStatic(var
, m_fp
, fr
, inited
);
6907 assert(fr
!= nullptr);
6909 Cell
* initVal
= m_stack
.topC();
6912 if (fr
->m_type
!= KindOfRef
) {
6916 TypedValue
* tvLocal
= frame_local(m_fp
, localId
);
6917 tvBind(fr
, tvLocal
);
6921 inline void OPTBLD_INLINE
VMExecutionContext::iopCatch(PC
& pc
) {
6923 assert(m_faults
.size() > 0);
6924 Fault fault
= m_faults
.back();
6925 m_faults
.pop_back();
6926 assert(fault
.m_faultType
== Fault::UserException
);
6927 m_stack
.pushObjectNoRc(fault
.m_userException
);
6930 inline void OPTBLD_INLINE
VMExecutionContext::iopLateBoundCls(PC
& pc
) {
6932 Class
* cls
= frameStaticClass(m_fp
);
6934 raise_error(HPHP::Strings::CANT_ACCESS_STATIC
);
6936 m_stack
.pushClass(cls
);
6939 inline void OPTBLD_INLINE
VMExecutionContext::iopVerifyParamType(PC
& pc
) {
6940 SYNC(); // We might need m_pc to be updated to throw.
6944 const Func
*func
= m_fp
->m_func
;
6945 assert(param
< func
->numParams());
6946 assert(func
->numParams() == int(func
->params().size()));
6947 const TypeConstraint
& tc
= func
->params()[param
].typeConstraint();
6948 assert(tc
.exists());
6949 const TypedValue
*tv
= frame_local(m_fp
, param
);
6950 tc
.verify(tv
, func
, param
);
6953 inline void OPTBLD_INLINE
VMExecutionContext::iopNativeImpl(PC
& pc
) {
6955 uint soff
= m_fp
->m_soff
;
6956 BuiltinFunction func
= m_fp
->m_func
->builtinFuncPtr();
6958 // Actually call the native implementation. This will handle freeing the
6959 // locals in the normal case. In the case of an exception, the VM unwinder
6960 // will take care of it.
6962 // Adjust the stack; the native implementation put the return value in the
6963 // right place for us already
6964 m_stack
.ndiscard(m_fp
->m_func
->numSlotsInFrame());
6965 ActRec
* sfp
= m_fp
->arGetSfp();
6966 if (LIKELY(sfp
!= m_fp
)) {
6967 // Restore caller's execution state.
6969 pc
= m_fp
->m_func
->unit()->entry() + m_fp
->m_func
->base() + soff
;
6972 // No caller; terminate.
6976 std::ostringstream os
;
6977 m_stack
.toStringElm(os
, m_stack
.topTV(), m_fp
);
6979 Trace::trace("Return %s from VMExecutionContext::dispatch("
6980 "%p)\n", os
.str().c_str(), m_fp
));
6987 inline void OPTBLD_INLINE
VMExecutionContext::iopHighInvalid(PC
& pc
) {
6988 fprintf(stderr
, "invalid bytecode executed\n");
6992 inline void OPTBLD_INLINE
VMExecutionContext::iopSelf(PC
& pc
) {
6994 Class
* clss
= arGetContextClass(m_fp
);
6996 raise_error(HPHP::Strings::CANT_ACCESS_SELF
);
6998 m_stack
.pushClass(clss
);
7001 inline void OPTBLD_INLINE
VMExecutionContext::iopParent(PC
& pc
) {
7003 Class
* clss
= arGetContextClass(m_fp
);
7005 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS
);
7007 Class
* parent
= clss
->parent();
7009 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT
);
7011 m_stack
.pushClass(parent
);
7014 inline void OPTBLD_INLINE
VMExecutionContext::iopCreateCl(PC
& pc
) {
7016 DECODE_IVA(numArgs
);
7017 DECODE_LITSTR(clsName
);
7018 Class
* cls
= Unit::loadClass(clsName
);
7019 c_Closure
* cl
= static_cast<c_Closure
*>(newInstance(cls
));
7020 c_Closure
* cl2
= cl
->init(numArgs
, m_fp
, m_stack
.top());
7021 m_stack
.ndiscard(numArgs
);
7023 m_stack
.pushObject(cl2
);
7026 template<bool isMethod
>
7028 VMExecutionContext::createContinuationHelper(const Func
* origFunc
,
7029 const Func
* genFunc
,
7030 ObjectData
* thisPtr
,
7032 Class
* frameStaticCls
) {
7033 auto const cont
= c_Continuation::alloc(
7034 SystemLib::s_ContinuationClass
,
7035 genFunc
->numLocals(),
7036 genFunc
->numIterators()
7038 cont
->incRefCount();
7039 cont
->setNoDestruct();
7040 cont
->init(origFunc
, thisPtr
, args
);
7042 // The ActRec corresponding to the generator body lives as long as the object
7043 // does. We set it up once, here, and then just change FP to point to it when
7044 // we enter the generator body.
7045 ActRec
* ar
= cont
->actRec();
7048 if (origFunc
->isClosureBody()) {
7049 genFunc
= genFunc
->cloneAndSetClass(origFunc
->cls());
7053 ar
->setThis(thisPtr
);
7054 thisPtr
->incRefCount();
7056 ar
->setClass(frameStaticCls
);
7059 ar
->setThis(nullptr);
7062 ar
->m_func
= genFunc
;
7064 ar
->setVarEnv(nullptr);
7066 TypedValue
* contLocal
= frame_local(ar
, 0);
7067 contLocal
->m_type
= KindOfObject
;
7068 contLocal
->m_data
.pobj
= cont
;
7069 // Do not incref the continuation here! Doing so will create a reference
7070 // cycle, since this reference is a local in the continuation frame and thus
7071 // will be decreffed when the continuation is destroyed. The corresponding
7072 // non-decref is in ~c_Continuation.
7077 template<bool isMethod
>
7079 VMExecutionContext::createContinuation(ActRec
* fp
,
7081 const Func
* origFunc
,
7082 const Func
* genFunc
) {
7083 ObjectData
* const thisPtr
= fp
->hasThis() ? fp
->getThis() : nullptr;
7087 args
= hhvm_get_frame_args(fp
);
7090 return createContinuationHelper
<isMethod
>(
7095 frameStaticClass(fp
)
7099 static inline void setContVar(const Func
* genFunc
,
7100 const StringData
* name
,
7102 c_Continuation
* cont
) {
7103 Id destId
= genFunc
->lookupVarId(name
);
7104 if (destId
!= kInvalidId
) {
7105 tvDup(src
, frame_local(cont
->actRec(), destId
));
7107 ActRec
*contFP
= cont
->actRec();
7108 if (!contFP
->hasVarEnv()) {
7109 // We pass skipInsert to this VarEnv because it's going to exist
7110 // independent of the chain; i.e. we can't stack-allocate it. We link it
7111 // into the chain in UnpackCont, and take it out in PackCont.
7112 contFP
->setVarEnv(VarEnv::createLazyAttach(contFP
, true));
7114 contFP
->getVarEnv()->setWithRef(name
, src
);
7118 static const StaticString
s_this("this");
7121 VMExecutionContext::fillContinuationVars(ActRec
* fp
,
7122 const Func
* origFunc
,
7123 const Func
* genFunc
,
7124 c_Continuation
* cont
) {
7125 // For functions that contain only named locals, the variable
7126 // environment is saved and restored by teleporting the values (and
7127 // their references) between the evaluation stack and the local
7128 // space at the end of the object using memcpy. Any variables in a
7129 // VarEnv are saved and restored from m_vars as usual.
7130 static const StringData
* thisStr
= s_this
.get();
7131 int nLocals
= genFunc
->numLocals();
7133 if (fp
->hasVarEnv()) {
7134 Stats::inc(Stats::Cont_CreateVerySlow
);
7135 Array definedVariables
= fp
->getVarEnv()->getDefinedVariables();
7136 skipThis
= definedVariables
.exists(s_this
, true);
7138 for (ArrayIter
iter(definedVariables
); !iter
.end(); iter
.next()) {
7139 setContVar(genFunc
, iter
.first().getStringData(),
7140 const_cast<TypedValue
*>(iter
.secondRef().asTypedValue()), cont
);
7143 skipThis
= origFunc
->lookupVarId(thisStr
) != kInvalidId
;
7144 for (Id i
= 0; i
< origFunc
->numNamedLocals(); ++i
) {
7145 setContVar(genFunc
, origFunc
->localVarName(i
),
7146 frame_local(fp
, i
), cont
);
7150 // If $this is used as a local inside the body and is not provided
7151 // by our containing environment, just prefill it here instead of
7152 // using InitThisLoc inside the body
7153 if (!skipThis
&& cont
->m_obj
.get()) {
7154 Id id
= genFunc
->lookupVarId(thisStr
);
7155 if (id
!= kInvalidId
) {
7156 tvAsVariant(&cont
->locals()[nLocals
- id
- 1]) = cont
->m_obj
;
7162 // Explicitly instantiate for hhbctranslator.o and codegen.o
7163 template c_Continuation
* VMExecutionContext::createContinuation
<true>(
7164 ActRec
*, bool, const Func
*, const Func
*);
7165 template c_Continuation
* VMExecutionContext::createContinuation
<false>(
7166 ActRec
*, bool, const Func
*, const Func
*);
7167 template c_Continuation
* VMExecutionContext::createContinuationHelper
<true>(
7168 const Func
*, const Func
*, ObjectData
*, ArrayData
*, Class
*);
7169 template c_Continuation
* VMExecutionContext::createContinuationHelper
<false>(
7170 const Func
*, const Func
*, ObjectData
*, ArrayData
*, Class
*);
// CreateCont: build a c_Continuation for the current function's generator
// body (genName) and push it on the eval stack.
// NOTE(review): the usual NEXT() prologue and the handler's closing lines
// appear elided in this capture — confirm against upstream bytecode.cpp.
inline void OPTBLD_INLINE
VMExecutionContext::iopCreateCont(PC& pc) {
  DECODE_IVA(getArgs);     // immediate: capture the caller's args?
  DECODE_LITSTR(genName);  // immediate: name of the generator-body Func

  const Func* origFunc = m_fp->m_func;
  const Func* genFunc = origFunc->getGeneratorBody(genName);
  assert(genFunc != nullptr);

  // Methods take the <true> path so receiver state is captured.
  bool isMethod = origFunc->isMethod();
  c_Continuation* cont = isMethod ?
    createContinuation<true>(m_fp, getArgs, origFunc, genFunc) :
    createContinuation<false>(m_fp, getArgs, origFunc, genFunc);

  // Copy the creating frame's visible locals into the continuation.
  fillContinuationVars(m_fp, origFunc, genFunc, cont);

  // Push the new continuation object (refcount transferred to the stack).
  TypedValue* ret = m_stack.allocTV();
  ret->m_type = KindOfObject;
  ret->m_data.pobj = cont;
}
7193 static inline c_Continuation
* frame_continuation(ActRec
* fp
) {
7194 ObjectData
* obj
= frame_local(fp
, 0)->m_data
.pobj
;
7195 assert(dynamic_cast<c_Continuation
*>(obj
));
7196 return static_cast<c_Continuation
*>(obj
);
7199 static inline c_Continuation
* this_continuation(ActRec
* fp
) {
7200 ObjectData
* obj
= fp
->getThis();
7201 assert(dynamic_cast<c_Continuation
*>(obj
));
7202 return static_cast<c_Continuation
*>(obj
);
// ContEnter: transfer control from the caller frame into the generator's
// pre-built ActRec.
// NOTE(review): several lines of this handler (the frame-pointer switch to
// contAR and the FunctionEnter failure path) are elided in this capture —
// confirm against upstream before relying on this listing.
void VMExecutionContext::iopContEnter(PC& pc) {
  // The stack must be empty! Or else generatorStackBase() won't work!
  assert(m_stack.top() ==
         (TypedValue*)m_fp - m_fp->m_func->numSlotsInFrame());

  // Do linkage of the continuation's AR.
  assert(m_fp->hasThis());
  c_Continuation* cont = this_continuation(m_fp);
  ActRec* contAR = cont->actRec();
  arSetSfp(contAR, m_fp);  // chain the generator AR back to the caller

  // Saved return offset, relative to the caller func's base.
  contAR->m_soff = m_fp->m_func->unit()->offsetOf(pc)
    - (uintptr_t)m_fp->m_func->base();
  // Returning from an interpreted generator frame goes through a TC helper.
  contAR->m_savedRip = (uintptr_t)tx64->getRetFromInterpretedGeneratorFrame();
  assert(isReturnHelper(contAR->m_savedRip));

  pc = contAR->m_func->getEntry();

  if (UNLIKELY(!EventHook::FunctionEnter(contAR, EventHook::NormalFunc))) {
    // NOTE(review): intercept-handling body elided in this capture.
  }
}
// ContExit: leave the generator frame and resume execution in the caller.
// NOTE(review): the NEXT() prologue and the restoration of m_fp to the
// caller frame appear elided in this capture — confirm against upstream.
void VMExecutionContext::iopContExit(PC& pc) {
  EventHook::FunctionExit(m_fp);
  ActRec* prevFp = m_fp->arGetSfp();
  // Resume the caller at the return offset saved by ContEnter.
  pc = prevFp->m_func->getEntry() + m_fp->m_soff;
}
7240 void VMExecutionContext::unpackContVarEnvLinkage(ActRec
* fp
) {
7241 // This is called from the TC, and is assumed not to reenter.
7242 if (fp
->hasVarEnv()) {
7243 VarEnv
*& topVE
= g_vmContext
->m_topVarEnv
;
7244 fp
->getVarEnv()->setPrevious(topVE
);
7245 topVE
= fp
->getVarEnv();
// UnpackCont: executed at the top of a generator body.  Pushes the value
// that was sent into the generator, then the label to switch on.
// NOTE(review): the NEXT() prologue appears elided in this capture.
inline void OPTBLD_INLINE
VMExecutionContext::iopUnpackCont(PC& pc) {
  c_Continuation* cont = frame_continuation(m_fp);

  unpackContVarEnvLinkage(m_fp);

  // Return the received value
  TypedValue* recv_to = m_stack.allocTV();
  TypedValue* recv_fr = cont->m_received.asTypedValue();
  // Raw bit copy transfers the refcount to the stack cell; the source is
  // then nulled so the continuation no longer owns it.
  memcpy(recv_to, recv_fr, sizeof(TypedValue));
  tvWriteNull(recv_fr);

  // Return the label in a stack cell
  TypedValue* label = m_stack.allocTV();
  label->m_type = KindOfInt64;
  label->m_data.num = cont->m_label;
}
7267 void VMExecutionContext::packContVarEnvLinkage(ActRec
* fp
) {
7268 if (fp
->hasVarEnv()) {
7269 g_vmContext
->m_topVarEnv
= fp
->getVarEnv()->previous();
7273 inline void OPTBLD_INLINE
VMExecutionContext::iopPackCont(PC
& pc
) {
7276 c_Continuation
* cont
= frame_continuation(m_fp
);
7278 packContVarEnvLinkage(m_fp
);
7279 cont
->c_Continuation::t_update(label
, tvAsCVarRef(m_stack
.topTV()));
// ContRetC: return from a generator body.  Marks the continuation done,
// stores the returned cell as its final value, and resumes the caller.
// NOTE(review): the NEXT() prologue, the pop of the returned cell, and the
// restoration of m_fp to the caller frame are elided in this capture —
// confirm against upstream.
inline void OPTBLD_INLINE
VMExecutionContext::iopContRetC(PC& pc) {
  c_Continuation* cont = frame_continuation(m_fp);
  cont->setDone(true);
  // Copy the returned cell into cont->m_value.
  tvSetIgnoreRef(m_stack.topC(), cont->m_value.asTypedValue());

  EventHook::FunctionExit(m_fp);
  ActRec* prevFp = m_fp->arGetSfp();
  pc = prevFp->m_func->getEntry() + m_fp->m_soff;
}
// ContNext: $cont->next() — resume the generator, sending null.
// NOTE(review): parts of this handler (prologue/pre-next checks) appear
// elided in this capture — confirm against upstream.
inline void OPTBLD_INLINE
VMExecutionContext::iopContNext(PC& pc) {
  c_Continuation* cont = this_continuation(m_fp);
  // next() sends null as the received value.
  cont->m_received.setNull();
}
// Shared implementation of ContSend/ContRaise.  Stores the sent value
// (local 0 of the method frame) into the continuation before it resumes;
// when `raise` is true the generator will rethrow it instead of yielding.
// NOTE(review): the raise-specific tail of this function appears elided in
// this capture — confirm against upstream.
template<bool raise>
inline void VMExecutionContext::contSendImpl() {
  c_Continuation* cont = this_continuation(m_fp);
  cont->startedCheck();  // send()/raise() invalid before the first next()

  cont->m_received.assignVal(tvAsVariant(frame_local(m_fp, 0)));

  assert(cont->m_label);  // a started generator always has a resume label
}
// ContSend: $cont->send($v) — resume the generator with a value.
// NOTE(review): the NEXT() prologue appears elided in this capture.
inline void OPTBLD_INLINE
VMExecutionContext::iopContSend(PC& pc) {
  contSendImpl<false>();
}
// ContRaise: $cont->raise($e) — resume the generator with an exception.
// NOTE(review): the NEXT() prologue appears elided in this capture.
inline void OPTBLD_INLINE
VMExecutionContext::iopContRaise(PC& pc) {
  contSendImpl<true>();
}
// ContValid: push whether the generator can still produce values
// (i.e. it is not done).
inline void OPTBLD_INLINE
VMExecutionContext::iopContValid(PC& pc) {
  TypedValue* tv = m_stack.allocTV();
  tvAsVariant(tv) = !this_continuation(m_fp)->done();
}
// ContCurrent: push the value most recently yielded by the generator.
inline void OPTBLD_INLINE
VMExecutionContext::iopContCurrent(PC& pc) {
  c_Continuation* cont = this_continuation(m_fp);
  cont->startedCheck();  // current() is invalid before the first next()

  TypedValue* tv = m_stack.allocTV();
  tvAsVariant(tv) = cont->m_value;
}
// ContStopped: mark the continuation as no longer running (executed when
// control leaves the generator without going through ContExit).
inline void OPTBLD_INLINE
VMExecutionContext::iopContStopped(PC& pc) {
  this_continuation(m_fp)->setRunning(false);
}
// ContHandle: an exception escaped the generator body.  Mark the
// continuation finished (no value) and rethrow the exception in the caller.
// NOTE(review): the pop of the exception cell appears elided in this
// capture — confirm against upstream.
inline void OPTBLD_INLINE
VMExecutionContext::iopContHandle(PC& pc) {
  c_Continuation* cont = this_continuation(m_fp);
  cont->setRunning(false);
  cont->setDone(true);
  cont->m_value.setNull();

  // Take a Variant ref so the exception survives the rethrow below.
  Variant exn = tvAsVariant(m_stack.topTV());
  assert(exn.asObjRef().instanceof(SystemLib::s_ExceptionClass));
  throw exn.asObjRef();
}
7360 inline void OPTBLD_INLINE
VMExecutionContext::iopStrlen(PC
& pc
) {
7362 TypedValue
* subj
= m_stack
.topTV();
7363 if (LIKELY(IS_STRING_TYPE(subj
->m_type
))) {
7364 int64_t ans
= subj
->m_data
.pstr
->size();
7365 tvRefcountedDecRef(subj
);
7366 subj
->m_type
= KindOfInt64
;
7367 subj
->m_data
.num
= ans
;
7369 Variant ans
= f_strlen(tvAsVariant(subj
));
7370 tvAsVariant(subj
) = ans
;
7374 inline void OPTBLD_INLINE
VMExecutionContext::iopIncStat(PC
& pc
) {
7376 DECODE_IVA(counter
);
7378 Stats::inc(Stats::StatCounter(counter
), value
);
7381 void VMExecutionContext::classExistsImpl(PC
& pc
, Attr typeAttr
) {
7383 TypedValue
* aloadTV
= m_stack
.topTV();
7384 tvCastToBooleanInPlace(aloadTV
);
7385 assert(aloadTV
->m_type
== KindOfBoolean
);
7386 bool autoload
= aloadTV
->m_data
.num
;
7389 TypedValue
* name
= m_stack
.topTV();
7390 tvCastToStringInPlace(name
);
7391 assert(IS_STRING_TYPE(name
->m_type
));
7393 tvAsVariant(name
) = Unit::classExists(name
->m_data
.pstr
, autoload
, typeAttr
);
// ClassExists: class_exists() — no attribute restriction on the match.
inline void OPTBLD_INLINE
VMExecutionContext::iopClassExists(PC& pc) {
  classExistsImpl(pc, AttrNone);
}
// InterfaceExists: interface_exists() — only interfaces match.
inline void OPTBLD_INLINE
VMExecutionContext::iopInterfaceExists(PC& pc) {
  classExistsImpl(pc, AttrInterface);
}
// TraitExists: trait_exists() — only traits match.
inline void OPTBLD_INLINE
VMExecutionContext::iopTraitExists(PC& pc) {
  classExistsImpl(pc, AttrTrait);
}
// Render the current VM stack as a multi-line string for tracing and
// debugging, with each line carrying the caller-supplied prefix.
// NOTE(review): this capture elides the return-type line, the halted-frame
// early return that surrounds the "__Halted" literal, and the arms of the
// `offset` conditional — confirm against upstream before relying on this.
string
VMExecutionContext::prettyStack(const string& prefix) const {
  string s("__Halted");
  int offset = (m_fp->m_func->unit() != nullptr)
    /* NOTE(review): conditional arms elided in this capture */;
  string begPrefix = prefix + "__";
  string midPrefix = prefix + "|| ";
  string endPrefix = prefix + "\\/";
  string stack = m_stack.toString(m_fp, offset, midPrefix);
  return begPrefix + "\n" + stack + endPrefix;
}
// Debug backing for register-state checks: the translator must have synced
// the VM registers before interpreter code touches them.
void VMExecutionContext::checkRegStateWork() const {
  assert(tl_regState == REGSTATE_CLEAN);
}
7428 void VMExecutionContext::DumpStack() {
7429 string s
= g_vmContext
->prettyStack("");
7430 fprintf(stderr
, "%s\n", s
.c_str());
// Debugger entry point: print the bytecode of the unit `skip` VM frames up
// from the current frame.
// NOTE(review): this capture elides the frame-skipping loop around
// getPrevVMState, the early returns, and the null-unit conditional — as
// listed here the "Current unit is NULL" message would print
// unconditionally.  Confirm against upstream.
void VMExecutionContext::DumpCurUnit(int skip) {
  ActRec* fp = g_vmContext->getFP();
  Offset pc = fp->m_func->unit() ? g_vmContext->pcOff() : 0;
  fp = g_vmContext->getPrevVMState(fp, &pc);
  if (fp == nullptr) {
    std::cout << "Don't have a valid fp\n";
  }

  printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
  Unit* u = fp->m_func->unit();
  std::cout << "Current unit is NULL\n";
  printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
  std::cout << u->toString();
}
// Debugger entry point: report the translated-code address that called
// into the VM and the source file:line of the current PC.
// NOTE(review): the prologue of this function (likely a register anchor)
// is elided in this capture — confirm against upstream.
void VMExecutionContext::PrintTCCallerInfo() {
  ActRec* fp = g_vmContext->getFP();
  Unit* u = fp->m_func->unit();
  fprintf(stderr, "Called from TC address %p\n",
          TranslatorX64::Get()->getTranslatedCaller());
  std::cerr << u->filepath()->data() << ':'
            << u->getLineNumber(u->offsetOf(g_vmContext->getPC()))
            << std::endl;
}
// Emit a horizontal separator into the trace log, prefixed with pfx.
// NOTE(review): this capture elides the declaration line(s) above the
// parameter list and the TRACE(...) wrapper around the string literal —
// confirm against upstream before relying on this listing.
condStackTraceSep(const char* pfx) {
  "========================================"
  "========================================\n",
}
// Dump the pretty-printed stack into the trace log (trace-level gated;
// the enclosing ONTRACE/do-while lines are elided in this capture).
#define COND_STACKTRACE(pfx) \
  string stack = prettyStack(pfx); \
  Trace::trace("%s\n", stack.c_str());)

// Generate one opN() wrapper per opcode: trace separators and stack dumps
// around the dispatch, plus an opcode-byte sanity assert.  Parts of the
// expansion are elided in this capture.
#define O(name, imm, pusph, pop, flags) \
  void VMExecutionContext::op##name() { \
  condStackTraceSep("op"#name" "); \
  COND_STACKTRACE("op"#name" pre: "); \
  assert(*pc == Op##name); \
  int offset = m_fp->m_func->unit()->offsetOf(pc); \
  Trace::trace("op"#name" offset: %d\n", offset)); \
  COND_STACKTRACE("op"#name" post: "); \
  condStackTraceSep("op"#name" "); \
7498 profileReturnValue(const DataType dt
) {
7499 const Func
* f
= curFunc();
7500 if (f
->isPseudoMain() || f
->isClosureBody() || f
->isMagic() ||
7501 Func::isSpecial(f
->name()))
7503 recordType(TypeProfileKey(TypeProfileKey::MethodName
, f
->name()), dt
);
// Main interpreter loop.  dispatchFlags selects compile-time variants:
//   LimitInstrs    — stop after numInstrs instructions (debugger stepping)
//   BreakOnCtlFlow — return to the caller at control-flow instructions
//   Profile        — record return-value types for type profiling
// NOTE(review): this capture elides the bodies of the three computed-goto
// tables, the #undef lines, the optabCover assignment, the ONTRACE/#ifdef
// scaffolding, and the tail of the DISPATCH()/O() macros — confirm against
// upstream before relying on this listing.
template <int dispatchFlags>
inline void VMExecutionContext::dispatchImpl(int numInstrs) {
  static const bool limInstrs = dispatchFlags & LimitInstrs;
  static const bool breakOnCtlFlow = dispatchFlags & BreakOnCtlFlow;
  static const bool profile = dispatchFlags & Profile;
  // Computed-goto label tables: direct, debugger-hooked, coverage-recording.
  static const void *optabDirect[] = {
#define O(name, imm, push, pop, flags) \
  static const void *optabDbg[] = {
#define O(name, imm, push, pop, flags) \
  static const void *optabCover[] = {
#define O(name, imm, push, pop, flags) \
  assert(sizeof(optabDirect) / sizeof(const void *) == Op_count);
  assert(sizeof(optabDbg) / sizeof(const void *) == Op_count);
  const void **optab = optabDirect;
  bool collectCoverage = ThreadInfo::s_threadInfo->
    m_reqInjectionData.getCoverage();
  if (collectCoverage) {
    // NOTE(review): optab = optabCover; elided in this capture.
  DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
  /*
   * Trace-only mapping of opcodes to names.
   */
  static const char *nametab[] = {
#define O(name, imm, push, pop, flags) \
#endif /* HPHP_TRACE */
  bool isCtlFlow = false;

// Advance to the next opcode: honor the instruction limit and control-flow
// break first, then trace and (optionally) profile the return value.
#define DISPATCH() do { \
  if ((breakOnCtlFlow && isCtlFlow) || \
      (limInstrs && UNLIKELY(numInstrs-- == 0))) { \
    Trace::trace("dispatch: Halt ExecutionContext::dispatch(%p)\n", \
    delete g_vmContext->m_lastLocFilter; \
    g_vmContext->m_lastLocFilter = nullptr; \
  COND_STACKTRACE("dispatch: "); \
  Trace::trace("dispatch: %d: %s\n", pcOff(), nametab[op])); \
  assert(op < Op_count); \
  if (profile && (op == OpRetC || op == OpRetV)) { \
    profileReturnValue(m_stack.top()->m_type); \

  ONTRACE(1, Trace::trace("dispatch: Enter ExecutionContext::dispatch(%p)\n",

// Per-opcode loop body: debugger hook, coverage recording, control-flow
// bookkeeping, and early exit when the function returns.
#define O(name, imm, pusph, pop, flags) \
    phpDebuggerOpcodeHook(pc); \
    if (collectCoverage) { \
      recordCodeCoverage(pc); \
    if (breakOnCtlFlow) { \
      isCtlFlow = instrIsControlFlow(Op##name); \
      Stats::incOp(Op##name); \
    const Op op = Op##name; \
    if (op == OpRetC || op == OpRetV || op == OpNativeImpl) { \
      if (UNLIKELY(!pc)) { m_fp = 0; return; } \
7602 class InterpretingFlagGuard
{
7606 InterpretingFlagGuard() {
7607 m_oldFlag
= g_vmContext
->m_interpreting
;
7608 g_vmContext
->m_interpreting
= true;
7610 ~InterpretingFlagGuard() {
7611 g_vmContext
->m_interpreting
= m_oldFlag
;
7615 void VMExecutionContext::dispatch() {
7616 InterpretingFlagGuard ifg
;
7617 if (shouldProfile()) {
7618 dispatchImpl
<Profile
>(0);
// Interpret at most numInstrs instructions, also stopping at control-flow
// instructions.  Used for debugger single-stepping out of the JIT.
void VMExecutionContext::dispatchN(int numInstrs) {
  InterpretingFlagGuard ifg;
  dispatchImpl<LimitInstrs | BreakOnCtlFlow>(numInstrs);
  // We are about to go back to Jit, check whether we should
  // stick with interpreter
  if (DEBUGGER_FORCE_INTR) {
    throw VMSwitchModeException(false);
  }
}
// Interpret a single basic block: run until the next control-flow
// instruction, then return to the JIT.
void VMExecutionContext::dispatchBB() {
  InterpretingFlagGuard ifg;
  dispatchImpl<BreakOnCtlFlow>(0);
  // We are about to go back to Jit, check whether we should
  // stick with interpreter
  if (DEBUGGER_FORCE_INTR) {
    throw VMSwitchModeException(false);
  }
}
// Record per-line code coverage for the current PC, deduplicating
// consecutive hits on the same unit/line pair (this path is hot).
// NOTE(review): the `return` inside the system-unit filter (and possibly
// an hhas-unit check) is elided in this capture — confirm against
// upstream; as listed the filter body is empty.
void VMExecutionContext::recordCodeCoverage(PC pc) {
  Unit* unit = getFP()->m_func->unit();
  assert(unit != nullptr);
  // Skip coverage for built-in system units.
  if (unit == SystemLib::s_nativeFuncUnit ||
      unit == SystemLib::s_nativeClassUnit) {
  }

  int line = unit->getLineNumber(pcOff());
  // Only record when we moved to a new unit or line since the last hit.
  if (unit != m_coverPrevUnit || line != m_coverPrevLine) {
    ThreadInfo* info = ThreadInfo::s_threadInfo.getNoCheck();
    m_coverPrevUnit = unit;
    m_coverPrevLine = line;
    const StringData* filepath = unit->filepath();
    assert(filepath->isStatic());
    info->m_coverage->Record(filepath->data(), line, line);
  }
}
7664 void VMExecutionContext::resetCoverageCounters() {
7665 m_coverPrevLine
= -1;
7666 m_coverPrevUnit
= nullptr;
// Save the current VM register state before reentering the VM, recording
// the ActRec at the base of the new reentry region on m_nestedVMs.
// NOTE(review): the opening of the debug conditional and trailing TRACE
// arguments are elided in this capture — confirm against upstream.
void VMExecutionContext::pushVMState(VMState &savedVM,
                                     const ActRec* reentryAR) {
  if (debug && savedVM.fp &&
      savedVM.fp->m_func &&
      savedVM.fp->m_func->unit()) {
    // Some asserts and tracing.
    const Func* func = savedVM.fp->m_func;
    (void) /* bound-check asserts in offsetOf */
      func->unit()->offsetOf(savedVM.pc);
    TRACE(3, "pushVMState: saving frame %s pc %p off %d fp %p\n",
          func->name()->data(),
          func->unit()->offsetOf(savedVM.pc),
          /* NOTE(review): remaining TRACE arguments elided in capture */);
  }
  m_nestedVMs.push_back(ReentryRecord(savedVM, reentryAR));
}
// Restore the previously saved VM register state after a reentry region
// completes, and drop its record from m_nestedVMs.
// NOTE(review): the restoration of the remaining registers, the opening of
// the debug conditional, and trailing TRACE arguments are elided in this
// capture — confirm against upstream.
void VMExecutionContext::popVMState() {
  assert(m_nestedVMs.size() >= 1);

  VMState &savedVM = m_nestedVMs.back().m_savedState;
  m_firstAR = savedVM.firstAR;
  assert(m_stack.top() == savedVM.sp);

  if (debug && savedVM.fp &&
      savedVM.fp->m_func &&
      savedVM.fp->m_func->unit()) {
    const Func* func = savedVM.fp->m_func;
    (void) /* bound-check asserts in offsetOf */
      func->unit()->offsetOf(savedVM.pc);
    TRACE(3, "popVMState: restoring frame %s pc %p off %d fp %p\n",
          func->name()->data(),
          func->unit()->offsetOf(savedVM.pc),
          /* NOTE(review): remaining TRACE arguments elided in capture */);
  }

  m_nestedVMs.pop_back();
}
// Per-request initialization: arenas, the global VarEnv, the VM stack,
// and the translator; then merge (or verify) the system units.
// NOTE(review): a few lines (e.g. the else keyword of the merge branch and
// any debug-only scaffolding around the stdclass check) are elided in this
// capture — confirm against upstream.
void VMExecutionContext::requestInit() {
  assert(SystemLib::s_unit);
  assert(SystemLib::s_nativeFuncUnit);
  assert(SystemLib::s_nativeClassUnit);

  // Placement-new the per-request arenas into their static storage.
  new (&s_requestArenaStorage) RequestArena();
  new (&s_varEnvArenaStorage) VarEnvArena();

  VarEnv::createGlobal();
  m_stack.requestInit();
  tx64->requestInit();

  if (UNLIKELY(RuntimeOption::EvalJitEnableRenameFunction)) {
    // With renameable functions nothing can be persistent; merge all
    // system units into the request.
    SystemLib::s_unit->merge();
    if (SystemLib::s_hhas_unit) SystemLib::s_hhas_unit->merge();
    SystemLib::s_nativeFuncUnit->merge();
    SystemLib::s_nativeClassUnit->merge();
  } else {
    // System units are always merge only, and
    // everything is persistent.
    assert(SystemLib::s_unit->isEmpty());
    assert(!SystemLib::s_hhas_unit || SystemLib::s_hhas_unit->isEmpty());
    assert(SystemLib::s_nativeFuncUnit->isEmpty());
    assert(SystemLib::s_nativeClassUnit->isEmpty());
  }

  profileRequestStart();

  // Sanity check: stdclass must be the first entry in its named-entity
  // class list.
  Class* cls = Unit::GetNamedEntity(s_stdclass.get())->clsList();
  assert(cls == SystemLib::s_stdclassClass);
}
7752 void VMExecutionContext::requestExit() {
7753 treadmillSharedVars();
7756 tx64
->requestExit();
7758 m_stack
.requestExit();
7759 profileRequestEnd();
7760 EventHook::Disable();
7762 if (m_globalVarEnv
) {
7763 assert(m_topVarEnv
= m_globalVarEnv
);
7764 VarEnv::destroy(m_globalVarEnv
);
7765 m_globalVarEnv
= m_topVarEnv
= 0;
7768 varenv_arena().~VarEnvArena();
7769 request_arena().~RequestArena();
7772 ///////////////////////////////////////////////////////////////////////////////