Clean up VectorEffects::init
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/vm/bytecode.h"
18 #include <algorithm>
19 #include <string>
20 #include <vector>
21 #include <sstream>
23 #include "folly/String.h"
25 #include "hphp/runtime/base/tv_comparisons.h"
26 #include "hphp/runtime/base/tv_conversions.h"
27 #include "hphp/runtime/base/tv_arith.h"
28 #include "hphp/compiler/builtin_symbols.h"
29 #include "hphp/runtime/vm/event_hook.h"
30 #include "hphp/runtime/vm/jit/translator.h"
31 #include "hphp/runtime/vm/srckey.h"
32 #include "hphp/runtime/vm/member_operations.h"
33 #include "hphp/runtime/base/class_info.h"
34 #include "hphp/runtime/base/code_coverage.h"
35 #include "hphp/runtime/base/file_repository.h"
36 #include "hphp/runtime/base/base_includes.h"
37 #include "hphp/runtime/base/execution_context.h"
38 #include "hphp/runtime/base/runtime_option.h"
39 #include "hphp/runtime/base/array/hphp_array.h"
40 #include "hphp/runtime/base/strings.h"
41 #include "hphp/util/util.h"
42 #include "hphp/util/trace.h"
43 #include "hphp/util/debug.h"
44 #include "hphp/runtime/base/stat_cache.h"
45 #include "hphp/runtime/base/shared/shared_variant.h"
46 #include "hphp/runtime/vm/debug/debug.h"
48 #include "hphp/runtime/vm/hhbc.h"
49 #include "hphp/runtime/vm/treadmill.h"
50 #include "hphp/runtime/vm/php_debug.h"
51 #include "hphp/runtime/vm/debugger_hook.h"
52 #include "hphp/runtime/vm/runtime.h"
53 #include "hphp/runtime/vm/jit/targetcache.h"
54 #include "hphp/runtime/vm/type_constraint.h"
55 #include "hphp/runtime/vm/unwind.h"
56 #include "hphp/runtime/vm/jit/translator-inline.h"
57 #include "hphp/runtime/ext/ext_string.h"
58 #include "hphp/runtime/ext/ext_error.h"
59 #include "hphp/runtime/ext/ext_closure.h"
60 #include "hphp/runtime/ext/ext_continuation.h"
61 #include "hphp/runtime/ext/ext_function.h"
62 #include "hphp/runtime/ext/ext_variable.h"
63 #include "hphp/runtime/ext/ext_array.h"
64 #include "hphp/runtime/base/stats.h"
65 #include "hphp/runtime/vm/type_profile.h"
66 #include "hphp/runtime/base/server/source_root_info.h"
67 #include "hphp/runtime/base/util/extended_logger.h"
68 #include "hphp/runtime/base/memory/tracer.h"
70 #include "hphp/system/systemlib.h"
71 #include "hphp/runtime/ext/ext_collections.h"
73 #include "hphp/runtime/vm/name_value_table_wrapper.h"
74 #include "hphp/runtime/vm/request_arena.h"
75 #include "hphp/util/arena.h"
77 #include <iostream>
78 #include <iomanip>
79 #include <algorithm>
80 #include <boost/format.hpp>
81 #include <boost/utility/typed_in_place_factory.hpp>
83 #include <cinttypes>
85 #include <libgen.h>
86 #include <sys/mman.h>
88 namespace HPHP {
90 // TODO: #1746957, #1756122
91 // we should skip the call in call_user_func_array if
92 // by-reference params are passed by value, or if its
93 // argument is not an array; but currently lots of tests
94 // depend on actually making the call.
95 const bool skipCufOnInvalidParams = false;
97 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
98 // to be closer to other bytecode.cpp data.
99 bool RuntimeOption::RepoAuthoritative = false;
101 using std::string;
103 using Transl::VMRegAnchor;
104 using Transl::EagerVMRegAnchor;
106 #if DEBUG
107 #define OPTBLD_INLINE
108 #else
109 #define OPTBLD_INLINE ALWAYS_INLINE
110 #endif
111 TRACE_SET_MOD(bcinterp);
113 ActRec* ActRec::arGetSfp() const {
114 ActRec* prevFrame = (ActRec*)m_savedRbp;
115 if (LIKELY(((uintptr_t)prevFrame - Util::s_stackLimit) >=
116 Util::s_stackSize)) {
117 if (LIKELY(prevFrame != nullptr)) return prevFrame;
120 return const_cast<ActRec*>(this);
123 bool
124 ActRec::skipFrame() const {
125 return m_func && m_func->skipFrame();
128 template <>
129 Class* arGetContextClassImpl<false>(const ActRec* ar) {
130 if (ar == nullptr) {
131 return nullptr;
133 return ar->m_func->cls();
136 template <>
137 Class* arGetContextClassImpl<true>(const ActRec* ar) {
138 if (ar == nullptr) {
139 return nullptr;
141 if (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin()) {
142 // Pseudomains inherit the context of their caller
143 VMExecutionContext* context = g_vmContext;
144 ar = context->getPrevVMState(ar);
145 while (ar != nullptr &&
146 (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin())) {
147 ar = context->getPrevVMState(ar);
149 if (ar == nullptr) {
150 return nullptr;
153 return ar->m_func->cls();
156 const StaticString s_call_user_func("call_user_func");
157 const StaticString s_call_user_func_array("call_user_func_array");
158 const StaticString s_stdclass("stdclass");
159 const StaticString s___call("__call");
160 const StaticString s___callStatic("__callStatic");
161 const StaticString s_file("file");
162 const StaticString s_line("line");
163 const StaticString s_function("function");
164 const StaticString s_args("args");
165 const StaticString s_class("class");
166 const StaticString s_object("object");
167 const StaticString s_type("type");
168 const StaticString s_include("include");
170 static inline
171 Transl::Translator* tx() {
172 return Transl::Translator::Get();
175 ///////////////////////////////////////////////////////////////////////////////
177 //=============================================================================
178 // Miscellaneous macros.
180 #define NEXT() pc++
181 #define DECODE_JMP(type, var) \
182 type var __attribute__((unused)) = *(type*)pc; \
183 ONTRACE(2, \
184 Trace::trace("decode: Immediate %s %" PRIi64"\n", #type, \
185 (int64_t)var));
186 #define ITER_SKIP(offset) pc = origPc + (offset);
188 #define DECODE(type, var) \
189 DECODE_JMP(type, var); \
190 pc += sizeof(type)
191 #define DECODE_IVA(var) \
192 int32_t var UNUSED = decodeVariableSizeImm(&pc); \
193 ONTRACE(2, \
194 Trace::trace("decode: Immediate int32 %" PRIi64"\n", \
195 (int64_t)var));
196 #define DECODE_LITSTR(var) \
197 StringData* var; \
198 do { \
199 DECODE(Id, id); \
200 var = m_fp->m_func->unit()->lookupLitstrId(id); \
201 } while (false)
203 #define DECODE_HA(var) DECODE_IVA(var)
204 #define DECODE_IA(var) DECODE_IVA(var)
206 #define DECODE_ITER_LIST(typeList, idList, vecLen) \
207 DECODE(int32_t, vecLen); \
208 assert(vecLen > 0); \
209 Id* typeList = (Id*)pc; \
210 Id* idList = (Id*)pc + 1; \
211 pc += 2 * vecLen * sizeof(Id);
213 #define SYNC() m_pc = pc
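// Minimal illustrative sketch (not a handler that exists in this file) of how
// these macros compose inside an interpreter case. The immediate names below
// are hypothetical:
//
//   NEXT();               // step pc past the opcode byte
//   DECODE_HA(localId);   // variable-size local-id immediate
//   DECODE_LITSTR(name);  // Id immediate, resolved via the unit's litstr table
//   SYNC();               // publish pc into m_pc before anything that can
//                         // re-enter or throw, so backtraces stay accurate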
215 //=============================================================================
216 // Miscellaneous helpers.
218 static inline Class* frameStaticClass(ActRec* fp) {
219 if (fp->hasThis()) {
220 return fp->getThis()->getVMClass();
221 } else if (fp->hasClass()) {
222 return fp->getClass();
223 } else {
224 return nullptr;
228 //=============================================================================
229 // VarEnv.
231 VarEnv::VarEnv()
232 : m_depth(0)
233 , m_malloced(false)
234 , m_global(false)
235 , m_cfp(0)
236 , m_nvTable(boost::in_place<NameValueTable>(
237 RuntimeOption::EvalVMInitialGlobalTableSize))
239 TypedValue globalArray;
240 globalArray.m_type = KindOfArray;
241 globalArray.m_data.parr =
242 new (request_arena()) GlobalNameValueTableWrapper(&*m_nvTable);
243 globalArray.m_data.parr->incRefCount();
244 m_nvTable->set(StringData::GetStaticString("GLOBALS"), &globalArray);
245 tvRefcountedDecRef(&globalArray);
248 VarEnv::VarEnv(ActRec* fp, ExtraArgs* eArgs)
249 : m_extraArgs(eArgs)
250 , m_depth(1)
251 , m_malloced(false)
252 , m_global(false)
253 , m_cfp(fp)
255 const Func* func = fp->m_func;
256 const Id numNames = func->numNamedLocals();
258 if (!numNames) return;
260 m_nvTable = boost::in_place<NameValueTable>(numNames);
262 TypedValue** origLocs =
263 reinterpret_cast<TypedValue**>(uintptr_t(this) + sizeof(VarEnv));
264 TypedValue* loc = frame_local(fp, 0);
265 for (Id i = 0; i < numNames; ++i, --loc) {
266 assert(func->lookupVarId(func->localVarName(i)) == (int)i);
267 origLocs[i] = m_nvTable->migrateSet(func->localVarName(i), loc);
271 VarEnv::~VarEnv() {
272 TRACE(3, "Destroying VarEnv %p [%s]\n",
273 this,
274 isGlobalScope() ? "global scope" : "local scope");
275 assert(m_restoreLocations.empty());
277 if (!isGlobalScope()) {
278 if (LIKELY(!m_malloced)) {
279 varenv_arena().endFrame();
280 return;
282 } else {
284 * When detaching the global scope, we leak any live objects (and
285 * let the smart allocator clean them up). This is because we're
286 * not supposed to run destructors for objects that are live at
287 * the end of a request.
289 m_nvTable->leak();
293 size_t VarEnv::getObjectSz(ActRec* fp) {
294 return sizeof(VarEnv) + sizeof(TypedValue*) * fp->m_func->numNamedLocals();
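// Layout sketch: the size above makes room for the VarEnv object immediately
// followed by an array of numNamedLocals() TypedValue* slots; both the
// constructor and detach() locate that trailing array as
// reinterpret_cast<TypedValue**>(uintptr_t(this) + sizeof(VarEnv)).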
297 VarEnv* VarEnv::createLocalOnStack(ActRec* fp) {
298 auto& va = varenv_arena();
299 va.beginFrame();
300 void* mem = va.alloc(getObjectSz(fp));
301 VarEnv* ret = new (mem) VarEnv(fp, fp->getExtraArgs());
302 TRACE(3, "Creating lazily attached VarEnv %p on stack\n", mem);
303 return ret;
306 VarEnv* VarEnv::createLocalOnHeap(ActRec* fp) {
307 void* mem = malloc(getObjectSz(fp));
308 VarEnv* ret = new (mem) VarEnv(fp, fp->getExtraArgs());
309 TRACE(3, "Creating lazily attached VarEnv %p on heap\n", mem);
310 ret->m_malloced = true;
311 return ret;
314 VarEnv* VarEnv::createGlobal() {
315 assert(!g_vmContext->m_globalVarEnv);
317 VarEnv* ret = new (request_arena()) VarEnv();
318 TRACE(3, "Creating VarEnv %p [global scope]\n", ret);
319 ret->m_global = true;
320 g_vmContext->m_globalVarEnv = ret;
321 return ret;
324 void VarEnv::destroy(VarEnv* ve) {
325 bool malloced = ve->m_malloced;
326 ve->~VarEnv();
327 if (UNLIKELY(malloced)) free(ve);
330 void VarEnv::attach(ActRec* fp) {
331 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
332 this,
333 isGlobalScope() ? "global scope" : "local scope",
334 int(fp->m_func->numNamedLocals()), fp);
335 assert(m_depth == 0 || fp->arGetSfp() == m_cfp ||
336 (fp->arGetSfp() == fp && g_vmContext->isNested()));
337 m_cfp = fp;
338 m_depth++;
340 // Overlay fp's locals, if it has any.
342 const Func* func = fp->m_func;
343 const Id numNames = func->numNamedLocals();
344 if (!numNames) {
345 return;
347 if (!m_nvTable) {
348 m_nvTable = boost::in_place<NameValueTable>(numNames);
351 TypedValue** origLocs = new (varenv_arena()) TypedValue*[
352 func->numNamedLocals()];
353 TypedValue* loc = frame_local(fp, 0);
354 for (Id i = 0; i < numNames; ++i, --loc) {
355 assert(func->lookupVarId(func->localVarName(i)) == (int)i);
356 origLocs[i] = m_nvTable->migrate(func->localVarName(i), loc);
358 m_restoreLocations.push_back(origLocs);
361 void VarEnv::detach(ActRec* fp) {
362 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
363 this,
364 isGlobalScope() ? "global scope" : "local scope",
365 fp);
366 assert(fp == m_cfp);
367 assert(m_depth > 0);
369 // Merge/remove fp's overlaid locals, if it had any.
370 const Func* func = fp->m_func;
371 if (Id const numLocals = func->numNamedLocals()) {
373 * In the case of a lazily attached VarEnv, we have our locations
374 * for the first (lazy) attach stored immediately following the
375 * VarEnv in memory. In this case m_restoreLocations will be empty.
377 assert((!isGlobalScope() && m_depth == 1) == m_restoreLocations.empty());
378 TypedValue** origLocs =
379 !m_restoreLocations.empty()
380 ? m_restoreLocations.back()
381 : reinterpret_cast<TypedValue**>(uintptr_t(this) + sizeof(VarEnv));
383 for (Id i = 0; i < numLocals; i++) {
384 m_nvTable->resettle(func->localVarName(i), origLocs[i]);
386 if (!m_restoreLocations.empty()) {
387 m_restoreLocations.pop_back();
391 VMExecutionContext* context = g_vmContext;
392 m_cfp = context->getPrevVMState(fp);
393 m_depth--;
394 if (m_depth == 0) {
395 m_cfp = nullptr;
396 // don't free global varEnv
397 if (context->m_globalVarEnv != this) {
398 assert(!isGlobalScope());
399 destroy(this);
404 // This helper creates an NVT because of dynamic variable accesses,
405 // even though we're already attached to a frame that had no named
406 // locals.
407 void VarEnv::ensureNvt() {
408 const size_t kLazyNvtSize = 3;
409 if (!m_nvTable) {
410 m_nvTable = boost::in_place<NameValueTable>(kLazyNvtSize);
414 void VarEnv::set(const StringData* name, TypedValue* tv) {
415 ensureNvt();
416 m_nvTable->set(name, tv);
419 void VarEnv::bind(const StringData* name, TypedValue* tv) {
420 ensureNvt();
421 m_nvTable->bind(name, tv);
424 void VarEnv::setWithRef(const StringData* name, TypedValue* tv) {
425 if (tv->m_type == KindOfRef) {
426 bind(name, tv);
427 } else {
428 set(name, tv);
432 TypedValue* VarEnv::lookup(const StringData* name) {
433 if (!m_nvTable) {
434 return 0;
436 return m_nvTable->lookup(name);
439 TypedValue* VarEnv::lookupAdd(const StringData* name) {
440 ensureNvt();
441 return m_nvTable->lookupAdd(name);
444 TypedValue* VarEnv::lookupRawPointer(const StringData* name) {
445 ensureNvt();
446 return m_nvTable->lookupRawPointer(name);
449 TypedValue* VarEnv::lookupAddRawPointer(const StringData* name) {
450 ensureNvt();
451 return m_nvTable->lookupAddRawPointer(name);
454 bool VarEnv::unset(const StringData* name) {
455 if (!m_nvTable) return true;
456 m_nvTable->unset(name);
457 return true;
460 Array VarEnv::getDefinedVariables() const {
461 Array ret = Array::Create();
463 if (!m_nvTable) return ret;
465 NameValueTable::Iterator iter(&*m_nvTable);
466 for (; iter.valid(); iter.next()) {
467 const StringData* sd = iter.curKey();
468 const TypedValue* tv = iter.curVal();
469 if (tvAsCVarRef(tv).isReferenced()) {
470 ret.setRef(StrNR(sd).asString(), tvAsCVarRef(tv));
471 } else {
472 ret.add(StrNR(sd).asString(), tvAsCVarRef(tv));
476 return ret;
479 TypedValue* VarEnv::getExtraArg(unsigned argInd) const {
480 return m_extraArgs->getExtraArg(argInd);
483 //=============================================================================
485 ExtraArgs::ExtraArgs() {}
486 ExtraArgs::~ExtraArgs() {}
488 void* ExtraArgs::allocMem(unsigned nargs) {
489 return smart_malloc(sizeof(TypedValue) * nargs + sizeof(ExtraArgs));
492 ExtraArgs* ExtraArgs::allocateCopy(TypedValue* args, unsigned nargs) {
493 void* mem = allocMem(nargs);
494 ExtraArgs* ea = new (mem) ExtraArgs();
497 * The stack grows downward, so the args in memory are "backward"; i.e. the
498 * leftmost (in PHP) extra arg is highest in memory.
500 std::reverse_copy(args, args + nargs, &ea->m_extraArgs[0]);
501 return ea;
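// Worked example (hypothetical values): with nargs == 3 and PHP extra args
// (a, b, c), the VM stack holds them "backward" -- args[0] == c at the lowest
// address, args[2] == a at the highest -- so the reverse_copy above stores
// m_extraArgs[0] == a, m_extraArgs[1] == b, m_extraArgs[2] == c, i.e. PHP
// left-to-right order.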
504 ExtraArgs* ExtraArgs::allocateUninit(unsigned nargs) {
505 void* mem = ExtraArgs::allocMem(nargs);
506 return new (mem) ExtraArgs();
509 void ExtraArgs::deallocate(ExtraArgs* ea, unsigned nargs) {
510 assert(nargs > 0);
512 for (unsigned i = 0; i < nargs; ++i) {
513 tvRefcountedDecRef(ea->m_extraArgs + i);
515 ea->~ExtraArgs();
516 smart_free(ea);
519 void ExtraArgs::deallocate(ActRec* ar) {
520 const int numExtra = ar->numArgs() - ar->m_func->numParams();
521 deallocate(ar->getExtraArgs(), numExtra);
524 TypedValue* ExtraArgs::getExtraArg(unsigned argInd) const {
525 return const_cast<TypedValue*>(&m_extraArgs[argInd]);
528 //=============================================================================
529 // Stack.
531 // Store actual stack elements array in a thread-local in order to amortize the
532 // cost of allocation.
533 class StackElms {
534 public:
535 StackElms() : m_elms(nullptr) {}
536 ~StackElms() {
537 flush();
539 TypedValue* elms() {
540 if (m_elms == nullptr) {
541 // RuntimeOption::EvalVMStackElms-sized and -aligned.
542 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
543 if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
544 throw std::runtime_error(
545 std::string("VM stack initialization failed: ") + strerror(errno));
548 return m_elms;
550 void flush() {
551 if (m_elms != nullptr) {
552 free(m_elms);
553 m_elms = nullptr;
556 private:
557 TypedValue* m_elms;
559 IMPLEMENT_THREAD_LOCAL(StackElms, t_se);
561 const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
562 // We reserve the bottom page of each stack for use as the surprise
563 // page, so the minimum useful stack size is the next power of two.
564 const uint Stack::sMinStackElms = 2 * sSurprisePageSize / sizeof(TypedValue);
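// Worked example (assuming 4 KiB pages and 16-byte TypedValues):
// sMinStackElms == 2 * 4096 / 16 == 512 cells, i.e. two pages' worth, so even
// the smallest permitted stack keeps one full usable page above the reserved
// surprise page.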
566 void Stack::ValidateStackSize() {
567 if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
568 throw std::runtime_error(str(
569 boost::format("VM stack size of 0x%llx is below the minimum of 0x%x")
570 % RuntimeOption::EvalVMStackElms
571 % sMinStackElms));
573 if (!Util::isPowerOfTwo(RuntimeOption::EvalVMStackElms)) {
574 throw std::runtime_error(str(
575 boost::format("VM stack size of 0x%llx is not a power of 2")
576 % RuntimeOption::EvalVMStackElms));
580 Stack::Stack()
581 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
584 Stack::~Stack() {
585 requestExit();
588 void
589 Stack::protect() {
590 if (Transl::trustSigSegv) {
591 mprotect(m_elms, sizeof(void*), PROT_NONE);
595 void
596 Stack::unprotect() {
597 if (Transl::trustSigSegv) {
598 mprotect(m_elms, sizeof(void*), PROT_READ | PROT_WRITE);
602 void
603 Stack::requestInit() {
604 m_elms = t_se->elms();
605 if (Transl::trustSigSegv) {
606 RequestInjectionData& data = ThreadInfo::s_threadInfo->m_reqInjectionData;
607 Lock l(data.surpriseLock);
608 assert(data.surprisePage == nullptr);
609 data.surprisePage = m_elms;
611 // Burn one element of the stack, to satisfy the constraint that
612 // valid m_top values always have the same high-order (>
613 // log(RuntimeOption::EvalVMStackElms)) bits.
614 m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;
616 // Because of the surprise page at the bottom of the stack we lose an
617 // additional 256 elements which must be taken into account when checking for
618 // overflow.
619 UNUSED size_t maxelms =
620 RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
621 assert(!wouldOverflow(maxelms - 1));
622 assert(wouldOverflow(maxelms));
624 // Reset permissions on our stack's surprise page
625 unprotect();
628 void
629 Stack::requestExit() {
630 if (m_elms != nullptr) {
631 if (Transl::trustSigSegv) {
632 RequestInjectionData& data = ThreadInfo::s_threadInfo->m_reqInjectionData;
633 Lock l(data.surpriseLock);
634 assert(data.surprisePage == m_elms);
635 unprotect();
636 data.surprisePage = nullptr;
638 m_elms = nullptr;
642 void flush_evaluation_stack() {
643 if (g_context.isNull()) {
644 // For RPCRequestHandler threads, the ExecutionContext can stay alive
645 // across requests and hold references to the VM stack, and
646 // the TargetCache needs to keep track of which classes are live, etc.
647 // So only flush the VM stack and the target cache if the execution
648 // context is dead.
650 if (!t_se.isNull()) {
651 t_se->flush();
653 Transl::TargetCache::flush();
657 static std::string toStringElm(const TypedValue* tv) {
658 std::ostringstream os;
660 if (tv->m_type < MinDataType || tv->m_type > MaxNumDataTypes) {
661 os << " ??? type " << tv->m_type << "\n";
662 return os.str();
665 assert(tv->m_type >= MinDataType && tv->m_type < MaxNumDataTypes);
666 if (IS_REFCOUNTED_TYPE(tv->m_type) && tv->m_data.pref->_count <= 0) {
667 // OK in the invoking frame when running a destructor.
668 os << " ??? inner_count " << tv->m_data.pref->_count << " ";
669 return os.str();
672 switch (tv->m_type) {
673 case KindOfRef:
674 os << "V:(";
675 os << "@" << tv->m_data.pref;
676 os << toStringElm(tv->m_data.pref->tv());
677 os << ")";
678 return os.str();
679 case KindOfClass:
680 os << "A:";
681 break;
682 default:
683 os << "C:";
684 break;
687 switch (tv->m_type) {
688 case KindOfUninit:
689 os << "Uninit";
690 break;
691 case KindOfNull:
692 os << "Null";
693 break;
694 case KindOfBoolean:
695 os << (tv->m_data.num ? "True" : "False");
696 break;
697 case KindOfInt64:
698 os << "0x" << std::hex << tv->m_data.num << std::dec;
699 break;
700 case KindOfDouble:
701 os << tv->m_data.dbl;
702 break;
703 case KindOfStaticString:
704 case KindOfString:
706 int len = tv->m_data.pstr->size();
707 bool truncated = false;
708 if (len > 128) {
709 len = 128;
710 truncated = true;
712 os << tv->m_data.pstr
713 << "c(" << tv->m_data.pstr->getCount() << ")"
714 << ":\""
715 << Util::escapeStringForCPP(tv->m_data.pstr->data(), len)
716 << "\"" << (truncated ? "..." : "");
718 break;
719 case KindOfArray:
720 assert(tv->m_data.parr->getCount() > 0);
721 os << tv->m_data.parr
722 << "c(" << tv->m_data.parr->getCount() << ")"
723 << ":Array";
724 break;
725 case KindOfObject:
726 assert(tv->m_data.pobj->getCount() > 0);
727 os << tv->m_data.pobj
728 << "c(" << tv->m_data.pobj->getCount() << ")"
729 << ":Object("
730 << tvAsCVarRef(tv).asCObjRef().get()->o_getClassName().get()->data()
731 << ")";
732 break;
733 case KindOfRef:
734 not_reached();
735 case KindOfClass:
736 os << tv->m_data.pcls
737 << ":" << tv->m_data.pcls->name()->data();
738 break;
739 default:
740 os << "?";
741 break;
744 return os.str();
747 static std::string toStringIter(const Iter* it, bool itRef) {
748 if (itRef) return "I:MutableArray";
750 // TODO(#2458166): it might be a CufIter, but we're just lucky that
751 // the bit pattern for the CufIter is going to have a 0 in
752 // getIterType for now.
753 switch (it->arr().getIterType()) {
754 case ArrayIter::TypeUndefined:
755 return "I:Undefined";
756 case ArrayIter::TypeArray:
757 return "I:Array";
758 case ArrayIter::TypeIterator:
759 return "I:Iterator";
761 assert(false);
762 return "I:?";
765 void Stack::toStringFrame(std::ostream& os, const ActRec* fp,
766 int offset, const TypedValue* ftop,
767 const string& prefix) const {
768 assert(fp);
770 // Use depth-first recursion to output the most deeply nested stack frame
771 // first.
773 Offset prevPc = 0;
774 TypedValue* prevStackTop = nullptr;
775 ActRec* prevFp = g_vmContext->getPrevVMState(fp, &prevPc, &prevStackTop);
776 if (prevFp != nullptr) {
777 toStringFrame(os, prevFp, prevPc, prevStackTop, prefix);
781 os << prefix;
782 const Func* func = fp->m_func;
783 assert(func);
784 func->validate();
785 string funcName(func->fullName()->data());
786 os << "{func:" << funcName
787 << ",soff:" << fp->m_soff
788 << ",this:0x" << std::hex << (fp->hasThis() ? fp->getThis() : nullptr)
789 << std::dec << "}";
790 TypedValue* tv = (TypedValue*)fp;
791 tv--;
793 if (func->numLocals() > 0) {
794 os << "<";
795 int n = func->numLocals();
796 for (int i = 0; i < n; i++, tv--) {
797 if (i > 0) {
798 os << " ";
800 os << toStringElm(tv);
802 os << ">";
805 assert(!func->info() || func->numIterators() == 0);
806 if (func->numIterators() > 0) {
807 os << "|";
808 Iter* it = &((Iter*)&tv[1])[-1];
809 for (int i = 0; i < func->numIterators(); i++, it--) {
810 if (i > 0) {
811 os << " ";
813 bool itRef;
814 if (func->checkIterScope(offset, i, itRef)) {
815 os << toStringIter(it, itRef);
816 } else {
817 os << "I:Undefined";
820 os << "|";
823 std::vector<std::string> stackElems;
824 visitStackElems(
825 fp, ftop, offset,
826 [&](const ActRec* ar) {
827 stackElems.push_back(
828 folly::format("{{func:{}}}", ar->m_func->fullName()->data()).str()
831 [&](const TypedValue* tv) {
832 stackElems.push_back(toStringElm(tv));
835 std::reverse(stackElems.begin(), stackElems.end());
836 os << ' ' << folly::join(' ', stackElems);
838 os << '\n';
841 string Stack::toString(const ActRec* fp, int offset,
842 const string prefix/* = "" */) const {
843 // The only way to figure out which stack elements are activation records is
844 // to follow the frame chain. However, the goal for each stack frame is to
845 // print stack fragments from deepest to shallowest -- a then b in the
846 // following example:
848 // {func:foo,soff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
849 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
851 // Use depth-first recursion to get the output order correct.
853 std::ostringstream os;
854 os << prefix << "=== Stack at " << curUnit()->filepath()->data() << ":" <<
855 curUnit()->getLineNumber(curUnit()->offsetOf(vmpc())) << " func " <<
856 curFunc()->fullName()->data() << " ===\n";
858 toStringFrame(os, fp, offset, m_top, prefix);
860 return os.str();
863 bool Stack::wouldOverflow(int numCells) const {
864 // The funny approach here is to validate the translator's assembly
865 // technique. We've aligned and sized the stack so that the high order
866 // bits of valid cells are all the same. In the translator, numCells
867 // can be hardcoded, and m_top is wired into a register,
868 // so the expression requires no loads.
869 intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
870 truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
871 intptr_t diff = truncatedTop - numCells -
872 sSurprisePageSize / sizeof(TypedValue);
873 return diff < 0;
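// Worked example (assumed EvalVMStackElms == 16384, 4 KiB pages, 16-byte
// cells): because m_elms is aligned to the full array size, masking with
// EvalVMStackElms - 1 recovers m_top's index within the array, i.e. how many
// cells remain before m_elms[0]; the bottom 4096 / 16 == 256 of those are the
// reserved surprise page. So truncatedTop == 300 admits at most 44 more cells
// (300 - 44 - 256 == 0 is not negative), while 45 would report an overflow.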
876 TypedValue* Stack::frameStackBase(const ActRec* fp) {
877 const Func* func = fp->m_func;
878 assert(!func->isGenerator());
879 return (TypedValue*)((uintptr_t)fp
880 - (uintptr_t)(func->numLocals()) * sizeof(TypedValue)
881 - (uintptr_t)(func->numIterators() * sizeof(Iter)));
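// Frame layout sketch assumed by the arithmetic above (non-generator frames):
//
//   higher addresses    [ ActRec (fp) ]
//                       [ local 0 .. local numLocals-1 ]
//                       [ iter 0 .. iter numIterators-1 ]
//   lower addresses     [ evaluation stack grows down from the returned base ]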
884 TypedValue* Stack::generatorStackBase(const ActRec* fp) {
885 assert(fp->m_func->isGenerator());
886 VMExecutionContext* context = g_vmContext;
887 ActRec* sfp = fp->arGetSfp();
888 if (sfp == fp) {
889 // In the reentrant case, we can consult the savedVM state. We simply
890 // use the top of stack of the previous VM frame (since the ActRec,
891 // locals, and iters for this frame do not reside on the VM stack).
892 return context->m_nestedVMs.back().m_savedState.sp;
894 // In the non-reentrant case, we know generators are always called from a
895 // function with an empty stack. So we find the caller's FP, compensate
896 // for its locals, and then we've found the base of the generator's stack.
897 return (TypedValue*)sfp - sfp->m_func->numSlotsInFrame();
901 __thread RequestArenaStorage s_requestArenaStorage;
902 __thread VarEnvArenaStorage s_varEnvArenaStorage;
905 //=============================================================================
906 // ExecutionContext.
908 using namespace HPHP;
909 using namespace HPHP::MethodLookup;
911 ActRec* VMExecutionContext::getOuterVMFrame(const ActRec* ar) {
912 ActRec* prevFrame = (ActRec*)ar->m_savedRbp;
913 if (LIKELY(((uintptr_t)prevFrame - Util::s_stackLimit) >=
914 Util::s_stackSize)) {
915 if (LIKELY(prevFrame != nullptr)) return prevFrame;
918 if (LIKELY(!m_nestedVMs.empty())) return m_nestedVMs.back().m_savedState.fp;
919 return nullptr;
922 TypedValue* VMExecutionContext::lookupClsCns(const NamedEntity* ne,
923 const StringData* cls,
924 const StringData* cns) {
925 Class* class_ = Unit::loadClass(ne, cls);
926 if (class_ == nullptr) {
927 raise_error(Strings::UNKNOWN_CLASS, cls->data());
929 TypedValue* clsCns = class_->clsCnsGet(cns);
930 if (clsCns == nullptr) {
931 raise_error("Couldn't find constant %s::%s",
932 cls->data(), cns->data());
934 return clsCns;
937 TypedValue* VMExecutionContext::lookupClsCns(const StringData* cls,
938 const StringData* cns) {
939 return lookupClsCns(Unit::GetNamedEntity(cls), cls, cns);
942 // Look up the method specified by methodName from the class specified by cls
943 // and enforce accessibility. Accessibility checks depend on the relationship
944 // between the class that first declared the method (baseClass) and the context
945 // class (ctx).
947 // If there are multiple accessible methods with the specified name declared in
948 // cls and ancestors of cls, the method from the most derived class will be
949 // returned, except if we are doing an ObjMethod call ("$obj->foo()") and there
950 // is an accessible private method, in which case the accessible private method
951 // will be returned.
953 // Accessibility rules:
955 // | baseClass/ctx relationship | public | protected | private |
956 // +----------------------------+--------+-----------+---------+
957 // | anon/unrelated | yes | no | no |
958 // | baseClass == ctx | yes | yes | yes |
959 // | baseClass derived from ctx | yes | yes | no |
960 // | ctx derived from baseClass | yes | yes | no |
961 // +----------------------------+--------+-----------+---------+
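// Illustrative example (hypothetical classes): given `class B { private
// function f() {} protected function g() {} }` and `class D extends B {}`,
// a lookup with ctx == D may reach B::g() (ctx derived from baseClass) but not
// B::f(); with no context class (anonymous) it may reach neither; with
// ctx == B it may reach both.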
963 const Func* VMExecutionContext::lookupMethodCtx(const Class* cls,
964 const StringData* methodName,
965 Class* ctx,
966 CallType callType,
967 bool raise /* = false */) {
968 const Func* method;
969 if (callType == CallType::CtorMethod) {
970 assert(methodName == nullptr);
971 method = cls->getCtor();
972 } else {
973 assert(callType == CallType::ObjMethod || callType == CallType::ClsMethod);
974 assert(methodName != nullptr);
975 method = cls->lookupMethod(methodName);
976 while (!method) {
977 static StringData* sd__construct
978 = StringData::GetStaticString("__construct");
979 if (UNLIKELY(methodName == sd__construct)) {
980 // We were looking up __construct and failed to find it. Fall back
981 // to old-style constructor: same as class name.
982 method = cls->getCtor();
983 if (!Func::isSpecial(method->name())) break;
985 if (raise) {
986 raise_error("Call to undefined method %s::%s from %s%s",
987 cls->name()->data(),
988 methodName->data(),
989 ctx ? "context " : "anonymous context",
990 ctx ? ctx->name()->data() : "");
992 return nullptr;
995 assert(method);
996 bool accessible = true;
997 // If we found a protected or private method, we need to do some
998 // accessibility checks.
999 if ((method->attrs() & (AttrProtected|AttrPrivate)) &&
1000 !g_vmContext->getDebuggerBypassCheck()) {
1001 Class* baseClass = method->baseCls();
1002 assert(baseClass);
1003 // If the context class is the same as the class that first
1004 // declared this method, then we know we have the right method
1005 // and we can stop here.
1006 if (ctx == baseClass) {
1007 return method;
1009 // The anonymous context cannot access protected or private methods,
1010 // so we can fail fast here.
1011 if (ctx == nullptr) {
1012 if (raise) {
1013 raise_error("Call to %s method %s::%s from anonymous context",
1014 (method->attrs() & AttrPrivate) ? "private" : "protected",
1015 cls->name()->data(),
1016 method->name()->data());
1018 return nullptr;
1020 assert(ctx);
1021 if (method->attrs() & AttrPrivate) {
1022 // The context class is not the same as the class that declared
1023 // this private method, so this private method is not accessible.
1024 // We need to keep going because the context class may define a
1025 // private method with this name.
1026 accessible = false;
1027 } else {
1028 // If the context class is derived from the class that first
1029 // declared this protected method, then we know this method is
1030 // accessible and we know the context class cannot have a private
1031 // method with the same name, so we're done.
1032 if (ctx->classof(baseClass)) {
1033 return method;
1035 if (!baseClass->classof(ctx)) {
1036 // The context class is not the same, an ancestor, or a descendent
1037 // of the class that first declared this protected method, so
1038 // this method is not accessible. Because the context class is
1039 // not the same or an ancestor of the class which first declared
1040 // the method, we know that the context class is not the same
1041 // or an ancestor of cls, and therefore we don't need to check
1042 // if the context class declares a private method with this name,
1043 // so we can fail fast here.
1044 if (raise) {
1045 raise_error("Call to protected method %s::%s from context %s",
1046 cls->name()->data(),
1047 method->name()->data(),
1048 ctx->name()->data());
1050 return nullptr;
1052 // We now know this protected method is accessible, but we need to
1053 // keep going because the context class may define a private method
1054 // with this name.
1055 assert(accessible && baseClass->classof(ctx));
1058 // If this is an ObjMethod call ("$obj->foo()") AND there is an ancestor
1059 // of cls that declares a private method with this name AND the context
1060 // class is an ancestor of cls, check if the context class declares a
1061 // private method with this name.
1062 if (method->hasPrivateAncestor() && callType == CallType::ObjMethod &&
1063 ctx && cls->classof(ctx)) {
1064 const Func* ctxMethod = ctx->lookupMethod(methodName);
1065 if (ctxMethod && ctxMethod->cls() == ctx &&
1066 (ctxMethod->attrs() & AttrPrivate)) {
1067 // For ObjMethod calls a private method from the context class
1068 // trumps any other method we may have found.
1069 return ctxMethod;
1072 if (accessible) {
1073 return method;
1075 if (raise) {
1076 raise_error("Call to private method %s::%s from %s%s",
1077 method->baseCls()->name()->data(),
1078 method->name()->data(),
1079 ctx ? "context " : "anonymous context",
1080 ctx ? ctx->name()->data() : "");
1082 return nullptr;
1085 LookupResult VMExecutionContext::lookupObjMethod(const Func*& f,
1086 const Class* cls,
1087 const StringData* methodName,
1088 bool raise /* = false */) {
1089 Class* ctx = arGetContextClass(getFP());
1090 f = lookupMethodCtx(cls, methodName, ctx, CallType::ObjMethod, false);
1091 if (!f) {
1092 f = cls->lookupMethod(s___call.get());
1093 if (!f) {
1094 if (raise) {
1095 // Throw a fatal error
1096 lookupMethodCtx(cls, methodName, ctx, CallType::ObjMethod, true);
1098 return LookupResult::MethodNotFound;
1100 return LookupResult::MagicCallFound;
1102 if (f->attrs() & AttrStatic && !f->isClosureBody()) {
1103 return LookupResult::MethodFoundNoThis;
1105 return LookupResult::MethodFoundWithThis;
1108 LookupResult
1109 VMExecutionContext::lookupClsMethod(const Func*& f,
1110 const Class* cls,
1111 const StringData* methodName,
1112 ObjectData* obj,
1113 ActRec* vmfp,
1114 bool raise /* = false */) {
1115 Class* ctx = arGetContextClass(vmfp);
1116 f = lookupMethodCtx(cls, methodName, ctx, CallType::ClsMethod, false);
1117 if (!f) {
1118 if (obj && obj->instanceof(cls)) {
1119 f = obj->getVMClass()->lookupMethod(s___call.get());
1121 if (!f) {
1122 f = cls->lookupMethod(s___callStatic.get());
1123 if (!f) {
1124 if (raise) {
1125 // Throw a fatal error
1126 lookupMethodCtx(cls, methodName, ctx, CallType::ClsMethod, true);
1128 return LookupResult::MethodNotFound;
1130 f->validate();
1131 assert(f);
1132 assert(f->attrs() & AttrStatic);
1133 return LookupResult::MagicCallStaticFound;
1135 assert(f);
1136 assert(obj);
1137 // __call cannot be static; this should be enforced by semantic
1138 // checks at defClass time or earlier
1139 assert(!(f->attrs() & AttrStatic));
1140 return LookupResult::MagicCallFound;
1142 if (obj && !(f->attrs() & AttrStatic) && obj->instanceof(cls)) {
1143 return LookupResult::MethodFoundWithThis;
1145 return LookupResult::MethodFoundNoThis;
1148 LookupResult VMExecutionContext::lookupCtorMethod(const Func*& f,
1149 const Class* cls,
1150 bool raise /* = false */) {
1151 f = cls->getCtor();
1152 if (!(f->attrs() & AttrPublic)) {
1153 Class* ctx = arGetContextClass(getFP());
1154 f = lookupMethodCtx(cls, nullptr, ctx, CallType::CtorMethod, raise);
1155 if (!f) {
1156 // If raise was true then lookupMethodCtx should have thrown,
1157 // so we should only be able to get here if raise was false
1158 assert(!raise);
1159 return LookupResult::MethodNotFound;
1162 return LookupResult::MethodFoundWithThis;
1165 ObjectData* VMExecutionContext::createObject(StringData* clsName,
1166 CArrRef params,
1167 bool init /* = true */) {
1168 Class* class_ = Unit::loadClass(clsName);
1169 if (class_ == nullptr) {
1170 throw_missing_class(clsName->data());
1172 Object o;
1173 o = newInstance(class_);
1174 if (init) {
1175 // call constructor
1176 TypedValue ret;
1177 invokeFunc(&ret, class_->getCtor(), params, o.get());
1178 tvRefcountedDecRef(&ret);
1181 ObjectData* ret = o.detach();
1182 ret->decRefCount();
1183 return ret;
1186 ObjectData* VMExecutionContext::createObjectOnly(StringData* clsName) {
1187 return createObject(clsName, null_array, false);
1190 ActRec* VMExecutionContext::getStackFrame() {
1191 VMRegAnchor _;
1192 return getFP();
1195 ObjectData* VMExecutionContext::getThis() {
1196 VMRegAnchor _;
1197 ActRec* fp = getFP();
1198 if (fp->skipFrame()) {
1199 fp = getPrevVMState(fp);
1200 if (!fp) return nullptr;
1202 if (fp->hasThis()) {
1203 return fp->getThis();
1205 return nullptr;
1208 Class* VMExecutionContext::getContextClass() {
1209 VMRegAnchor _;
1210 ActRec* ar = getFP();
1211 assert(ar != nullptr);
1212 if (ar->skipFrame()) {
1213 ar = getPrevVMState(ar);
1214 if (!ar) return nullptr;
1216 return ar->m_func->cls();
1219 Class* VMExecutionContext::getParentContextClass() {
1220 if (Class* ctx = getContextClass()) {
1221 return ctx->parent();
1223 return nullptr;
1226 CStrRef VMExecutionContext::getContainingFileName() {
1227 VMRegAnchor _;
1228 ActRec* ar = getFP();
1229 if (ar == nullptr) return empty_string;
1230 if (ar->skipFrame()) {
1231 ar = getPrevVMState(ar);
1232 if (ar == nullptr) return empty_string;
1234 Unit* unit = ar->m_func->unit();
1235 return unit->filepathRef();
1238 int VMExecutionContext::getLine() {
1239 VMRegAnchor _;
1240 ActRec* ar = getFP();
1241 Unit* unit = ar ? ar->m_func->unit() : nullptr;
1242 Offset pc = unit ? pcOff() : 0;
1243 if (ar == nullptr) return -1;
1244 if (ar->skipFrame()) {
1245 ar = getPrevVMState(ar, &pc);
1247 if (ar == nullptr || (unit = ar->m_func->unit()) == nullptr) return -1;
1248 return unit->getLineNumber(pc);
1251 Array VMExecutionContext::getCallerInfo() {
1252 VMRegAnchor _;
1253 Array result = Array::Create();
1254 ActRec* ar = getFP();
1255 if (ar->skipFrame()) {
1256 ar = getPrevVMState(ar);
1258 while (ar->m_func->name()->isame(s_call_user_func.get())
1259 || ar->m_func->name()->isame(s_call_user_func_array.get())) {
1260 ar = getPrevVMState(ar);
1261 if (ar == nullptr) {
1262 return result;
1266 Offset pc = 0;
1267 ar = getPrevVMState(ar, &pc);
1268 while (ar != nullptr) {
1269 if (!ar->m_func->name()->isame(s_call_user_func.get())
1270 && !ar->m_func->name()->isame(s_call_user_func_array.get())) {
1271 Unit* unit = ar->m_func->unit();
1272 int lineNumber;
1273 if ((lineNumber = unit->getLineNumber(pc)) != -1) {
1274 result.set(s_file, unit->filepath()->data(), true);
1275 result.set(s_line, lineNumber);
1276 return result;
1279 ar = getPrevVMState(ar, &pc);
1281 return result;
1284 bool VMExecutionContext::renameFunction(const StringData* oldName,
1285 const StringData* newName) {
1286 return m_renamedFuncs.rename(oldName, newName);
1289 bool VMExecutionContext::isFunctionRenameable(const StringData* name) {
1290 return m_renamedFuncs.isFunctionRenameable(name);
1293 void VMExecutionContext::addRenameableFunctions(ArrayData* arr) {
1294 m_renamedFuncs.addRenameableFunctions(arr);
1297 VarEnv* VMExecutionContext::getVarEnv() {
1298 VMRegAnchor _;
1300 ActRec* fp = getFP();
1301 if (UNLIKELY(!fp)) return NULL;
1302 if (fp->skipFrame()) {
1303 fp = getPrevVMState(fp);
1305 if (!fp) return nullptr;
1306 assert(!fp->hasInvName());
1307 if (!fp->hasVarEnv()) {
1308 fp->setVarEnv(VarEnv::createLocalOnStack(fp));
1310 return fp->m_varEnv;
1313 void VMExecutionContext::setVar(StringData* name, TypedValue* v, bool ref) {
1314 VMRegAnchor _;
1315 // setVar() should only be called after getVarEnv() has been called
1316 // to create a varEnv
1317 ActRec *fp = getFP();
1318 if (!fp) return;
1319 if (fp->skipFrame()) {
1320 fp = getPrevVMState(fp);
1322 assert(!fp->hasInvName());
1323 assert(!fp->hasExtraArgs());
1324 assert(fp->m_varEnv != nullptr);
1325 if (ref) {
1326 fp->m_varEnv->bind(name, v);
1327 } else {
1328 fp->m_varEnv->set(name, v);
1332 Array VMExecutionContext::getLocalDefinedVariables(int frame) {
1333 VMRegAnchor _;
1334 ActRec *fp = getFP();
1335 for (; frame > 0; --frame) {
1336 if (!fp) break;
1337 fp = getPrevVMState(fp);
1339 if (!fp) {
1340 return Array::Create();
1342 assert(!fp->hasInvName());
1343 if (fp->hasVarEnv()) {
1344 return fp->m_varEnv->getDefinedVariables();
1346 const Func *func = fp->m_func;
1347 auto numLocals = func->numNamedLocals();
1348 ArrayInit ret(numLocals);
1349 for (Id id = 0; id < numLocals; ++id) {
1350 TypedValue* ptv = frame_local(fp, id);
1351 if (ptv->m_type == KindOfUninit) {
1352 continue;
1354 Variant name(func->localVarName(id));
1355 ret.add(name, tvAsVariant(ptv));
1357 return ret.toArray();
1360 void VMExecutionContext::shuffleMagicArgs(ActRec* ar) {
1361 // We need to put this where the first argument is
1362 StringData* invName = ar->getInvName();
1363 int nargs = ar->numArgs();
1364 ar->setVarEnv(nullptr);
1365 assert(!ar->hasVarEnv() && !ar->hasInvName());
1366 // We need to make an array containing all the arguments passed by the
1367 // caller and put it where the second argument is
1368 ArrayData* argArray = pack_args_into_array(ar, nargs);
1369 argArray->incRefCount();
1370 // Remove the arguments from the stack
1371 for (int i = 0; i < nargs; ++i) {
1372 m_stack.popC();
1374 // Move invName to where the first argument belongs; no need
1375 // to incRef/decRef since we are transferring ownership.
1376 m_stack.pushStringNoRc(invName);
1377 // Move argArray to where the second argument belongs. We've already
1378 // incReffed the array above so we don't need to do it here.
1379 m_stack.pushArrayNoRc(argArray);
1381 ar->setNumArgs(2);
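// Illustrative effect (hypothetical call): for a magic dispatch of
// $obj->missing(1, 2, 3), the three pushed arguments are replaced so the frame
// carries exactly two values -- the method-name string "missing" where the
// first argument was and the packed array [1, 2, 3] where the second was --
// matching the __call($name, $args) signature.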
1384 static inline void checkStack(Stack& stk, const Func* f) {
1385 ThreadInfo* info = ThreadInfo::s_threadInfo.getNoCheck();
1386 // Check whether func's maximum stack usage would overflow the stack.
1387 // Both native and VM stack overflows are independently possible.
1388 if (!stack_in_bounds(info) ||
1389 stk.wouldOverflow(f->maxStackCells() + kStackCheckPadding)) {
1390 TRACE(1, "Maximum VM stack depth exceeded.\n");
1391 raise_error("Stack overflow");
1395 bool VMExecutionContext::prepareFuncEntry(ActRec *ar, PC& pc) {
1396 const Func* func = ar->m_func;
1397 Offset firstDVInitializer = InvalidAbsoluteOffset;
1398 bool raiseMissingArgumentWarnings = false;
1399 int nparams = func->numParams();
1400 if (UNLIKELY(ar->m_varEnv != nullptr)) {
1402 * m_varEnv != nullptr => we have a varEnv, extraArgs, or an invName.
1404 if (ar->hasInvName()) {
1405 // shuffleMagicArgs deals with everything. No need for
1406 // further argument munging.
1407 shuffleMagicArgs(ar);
1408 } else if (ar->hasVarEnv()) {
1409 m_fp = ar;
1410 if (!func->isGenerator()) {
1411 assert(func->isPseudoMain());
1412 pushLocalsAndIterators(func);
1413 ar->m_varEnv->attach(ar);
1415 pc = func->getEntry();
1416 // Nothing more to do; get out
1417 return true;
1418 } else {
1419 assert(ar->hasExtraArgs());
1420 assert(func->numParams() < ar->numArgs());
1422 } else {
1423 int nargs = ar->numArgs();
1424 if (nargs != nparams) {
1425 if (nargs < nparams) {
1426 // Push uninitialized nulls for missing arguments. Some of them may end
1427 // up getting default-initialized, but regardless, we need to make space
1428 // for them on the stack.
1429 const Func::ParamInfoVec& paramInfo = func->params();
1430 for (int i = nargs; i < nparams; ++i) {
1431 m_stack.pushUninit();
1432 Offset dvInitializer = paramInfo[i].funcletOff();
1433 if (dvInitializer == InvalidAbsoluteOffset) {
1434 // We wait to raise warnings until after all the locals have been
1435 // initialized. This is important because things need to be in a
1436 // consistent state in case the user error handler throws.
1437 raiseMissingArgumentWarnings = true;
1438 } else if (firstDVInitializer == InvalidAbsoluteOffset) {
1439 // This is the first unpassed arg with a default value, so
1440 // this is where we'll need to jump to.
1441 firstDVInitializer = dvInitializer;
1444 } else {
1445 if (func->attrs() & AttrMayUseVV) {
1446 // Extra parameters must be moved off the stack.
1447 const int numExtras = nargs - nparams;
1448 ar->setExtraArgs(ExtraArgs::allocateCopy((TypedValue*)ar - nargs,
1449 numExtras));
1450 m_stack.ndiscard(numExtras);
1451 } else {
1452 // The function we're calling is not marked as "MayUseVV",
1453 // so just discard the extra arguments
1454 int numExtras = nargs - nparams;
1455 for (int i = 0; i < numExtras; i++) {
1456 m_stack.popTV();
1458 ar->setNumArgs(nparams);
1464 int nlocals = nparams;
1465 if (UNLIKELY(func->isClosureBody())) {
1466 int nuse = init_closure(ar, m_stack.top());
1467 // init_closure doesn't move m_stack
1468 m_stack.nalloc(nuse);
1469 nlocals += nuse;
1470 func = ar->m_func;
1473 if (LIKELY(!func->isGenerator())) {
1475 * we only get here from callAndResume
1476 * if we failed to get a translation for
1477 * a generator's prologue
1479 pushLocalsAndIterators(func, nlocals);
1482 m_fp = ar;
1483 if (firstDVInitializer != InvalidAbsoluteOffset) {
1484 pc = func->unit()->entry() + firstDVInitializer;
1485 } else {
1486 pc = func->getEntry();
1488 // cppext functions/methods have their own logic for raising
1489 // warnings for missing arguments, so we only need to do this work
1490 // for non-cppext functions/methods
1491 if (raiseMissingArgumentWarnings && !func->info()) {
1492 // need to sync m_pc to pc for backtraces/re-entry
1493 SYNC();
1494 const Func::ParamInfoVec& paramInfo = func->params();
1495 for (int i = ar->numArgs(); i < nparams; ++i) {
1496 Offset dvInitializer = paramInfo[i].funcletOff();
1497 if (dvInitializer == InvalidAbsoluteOffset) {
1498 const char* name = func->name()->data();
1499 if (nparams == 1) {
1500 raise_warning(Strings::MISSING_ARGUMENT, name, i);
1501 } else {
1502 raise_warning(Strings::MISSING_ARGUMENTS, name, nparams, i);
1507 return true;
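// Illustrative example (hypothetical PHP function): for
// `function f($a, $b = 5, $c = 7)` called with one argument, two uninit cells
// are pushed for $b and $c, firstDVInitializer points at $b's default-value
// funclet, and execution enters there so both defaults get initialized before
// the main body; no missing-argument warning is raised because every unpassed
// parameter has a default.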
1510 void VMExecutionContext::syncGdbState() {
1511 if (RuntimeOption::EvalJit && !RuntimeOption::EvalJitNoGdb) {
1512 tx()->getDebugInfo()->debugSync();
1516 void VMExecutionContext::enterVMPrologue(ActRec* enterFnAr) {
1517 assert(enterFnAr);
1518 Stats::inc(Stats::VMEnter);
1519 if (ThreadInfo::s_threadInfo->m_reqInjectionData.getJit()) {
1520 int np = enterFnAr->m_func->numParams();
1521 int na = enterFnAr->numArgs();
1522 if (na > np) na = np + 1;
1523 Transl::TCA start = enterFnAr->m_func->getPrologue(na);
1524 tx()->enterTCAtProlog(enterFnAr, start);
1525 } else {
1526 if (prepareFuncEntry(enterFnAr, m_pc)) {
1527 enterVMWork(enterFnAr);
1532 void VMExecutionContext::enterVMWork(ActRec* enterFnAr) {
1533 Transl::TCA start = nullptr;
1534 if (enterFnAr) {
1535 if (!EventHook::FunctionEnter(enterFnAr, EventHook::NormalFunc)) return;
1536 checkStack(m_stack, enterFnAr->m_func);
1537 start = enterFnAr->m_func->getFuncBody();
1539 Stats::inc(Stats::VMEnter);
1540 if (ThreadInfo::s_threadInfo->m_reqInjectionData.getJit()) {
1541 (void) curUnit()->offsetOf(m_pc); /* assert */
1542 if (enterFnAr) {
1543 assert(start);
1544 tx()->enterTCAfterProlog(start);
1545 } else {
1546 SrcKey sk(curFunc(), m_pc);
1547 tx()->enterTCAtSrcKey(sk);
1549 } else {
1550 dispatch();
1554 void VMExecutionContext::enterVM(TypedValue* retval, ActRec* ar) {
1555 DEBUG_ONLY int faultDepth = m_faults.size();
1556 SCOPE_EXIT { assert(m_faults.size() == faultDepth); };
1558 m_firstAR = ar;
1559 ar->m_savedRip = reinterpret_cast<uintptr_t>(tx()->getCallToExit());
1560 assert(isReturnHelper(ar->m_savedRip));
1563 * When an exception is propagating, each nesting of the VM is
1564 * responsible for unwinding its portion of the execution stack, and
1565 * finding user handlers if it is a catchable exception.
1567 * This try/catch is where all this logic is centered. The actual
1568 * unwinding happens under exception_handler in unwind.cpp, which
1569 * returns a UnwindAction here to indicate what to do next.
1571 * Either we'll enter the VM loop again at a user error/fault
1572 * handler, or propagate the exception to a less-nested VM.
1574 bool first = true;
1575 resume:
1576 try {
1577 if (first) {
1578 first = false;
1579 if (m_fp && !ar->m_varEnv) {
1580 enterVMPrologue(ar);
1581 } else if (prepareFuncEntry(ar, m_pc)) {
1582 enterVMWork(ar);
1584 } else {
1585 enterVMWork(0);
1588 // Everything succeeded with no exception---return to the previous
1589 // VM nesting level.
1590 *retval = *m_stack.topTV();
1591 m_stack.discard();
1592 return;
1594 } catch (...) {
1595 always_assert(Transl::tl_regState == Transl::VMRegState::CLEAN);
1596 auto const action = exception_handler();
1597 if (action == UnwindAction::ResumeVM) {
1598 goto resume;
1600 always_assert(action == UnwindAction::Propagate);
1604 * Here we have to propagate an exception out of this VM's nesting
1605 * level.
1608 if (g_vmContext->m_nestedVMs.empty()) {
1609 m_fp = nullptr;
1610 m_pc = nullptr;
1613 assert(m_faults.size() > 0);
1614 Fault fault = m_faults.back();
1615 m_faults.pop_back();
1617 switch (fault.m_faultType) {
1618 case Fault::Type::UserException:
1620 Object obj = fault.m_userException;
1621 fault.m_userException->decRefCount();
1622 throw obj;
1624 case Fault::Type::CppException:
1625 // throwException() will take care of deleting heap-allocated
1626 // exception object for us
1627 fault.m_cppException->throwException();
1628 not_reached();
1631 not_reached();
1634 void VMExecutionContext::reenterVM(TypedValue* retval,
1635 ActRec* ar,
1636 TypedValue* savedSP) {
1637 ar->m_soff = 0;
1638 ar->m_savedRbp = 0;
1639 VMState savedVM = { getPC(), getFP(), m_firstAR, savedSP };
1640 TRACE(3, "savedVM: %p %p %p %p\n", m_pc, m_fp, m_firstAR, savedSP);
1641 pushVMState(savedVM, ar);
1642 assert(m_nestedVMs.size() >= 1);
1643 try {
1644 enterVM(retval, ar);
1645 popVMState();
1646 } catch (...) {
1647 popVMState();
1648 throw;
1650 TRACE(1, "Reentry: exit fp %p pc %p\n", m_fp, m_pc);
1653 void VMExecutionContext::invokeFunc(TypedValue* retval,
1654 const Func* f,
1655 CArrRef params,
1656 ObjectData* this_ /* = NULL */,
1657 Class* cls /* = NULL */,
1658 VarEnv* varEnv /* = NULL */,
1659 StringData* invName /* = NULL */,
1660 InvokeFlags flags /* = InvokeNormal */) {
1661 assert(retval);
1662 assert(f);
1663 // If this is a regular function, this_ and cls must be NULL
1664 assert(f->preClass() || f->isPseudoMain() || (!this_ && !cls));
1665 // If this is a method, either this_ or cls must be non-NULL
1666 assert(!f->preClass() || (this_ || cls));
1667 // If this is a static method, this_ must be NULL
1668 assert(!(f->attrs() & AttrStatic && !f->isClosureBody()) ||
1669 (!this_));
1670 // invName should only be non-NULL if we are calling __call or
1671 // __callStatic
1672 assert(!invName || f->name()->isame(s___call.get()) ||
1673 f->name()->isame(s___callStatic.get()));
1674 // If a variable environment is being inherited then params must be empty
1675 assert(!varEnv || params.empty());
1677 VMRegAnchor _;
1679 bool isMagicCall = (invName != nullptr);
1681 if (this_ != nullptr) {
1682 this_->incRefCount();
1684 Cell* savedSP = m_stack.top();
1686 if (f->numParams() > kStackCheckReenterPadding - kNumActRecCells) {
1687 checkStack(m_stack, f);
1690 if (flags & InvokePseudoMain) {
1691 assert(f->isPseudoMain() && !params.get());
1692 Unit* toMerge = f->unit();
1693 toMerge->merge();
1694 if (toMerge->isMergeOnly()) {
1695 *retval = *toMerge->getMainReturn();
1696 return;
1700 ActRec* ar = m_stack.allocA();
1701 ar->m_soff = 0;
1702 ar->m_savedRbp = 0;
1703 ar->m_func = f;
1704 if (this_) {
1705 ar->setThis(this_);
1706 } else if (cls) {
1707 ar->setClass(cls);
1708 } else {
1709 ar->setThis(nullptr);
1711 if (isMagicCall) {
1712 ar->initNumArgs(2);
1713 } else {
1714 ar->initNumArgs(params.size());
1716 ar->setVarEnv(varEnv);
1718 #ifdef HPHP_TRACE
1719 if (m_fp == nullptr) {
1720 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
1721 f->name()->data(), ar);
1722 } else {
1723 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
1724 f->name()->data(), m_pc, ar,
1725 m_fp->m_func ? m_fp->m_func->name()->data() : "unknownBuiltin", m_fp);
1727 #endif
1729 ArrayData *arr = params.get();
1730 if (isMagicCall) {
1731 // Put the method name into the location of the first parameter. We
1732 // are transferring ownership, so no need to incRef/decRef here.
1733 m_stack.pushStringNoRc(invName);
1734 // Put array of arguments into the location of the second parameter
1735 m_stack.pushArray(arr);
1736 } else if (arr) {
1737 const int numParams = f->numParams();
1738 const int numExtraArgs = arr->size() - numParams;
1739 ExtraArgs* extraArgs = nullptr;
1740 if (numExtraArgs > 0 && (f->attrs() & AttrMayUseVV)) {
1741 extraArgs = ExtraArgs::allocateUninit(numExtraArgs);
1742 ar->setExtraArgs(extraArgs);
1744 int paramId = 0;
1745 for (ssize_t i = arr->iter_begin();
1746 i != ArrayData::invalid_index;
1747 i = arr->iter_advance(i), ++paramId) {
1748 TypedValue *from = arr->nvGetValueRef(i);
1749 TypedValue *to;
1750 if (LIKELY(paramId < numParams)) {
1751 to = m_stack.allocTV();
1752 } else {
1753 if (!(f->attrs() & AttrMayUseVV)) {
1754 // Discard extra arguments, since the function cannot
1755 // possibly use them.
1756 assert(extraArgs == nullptr);
1757 ar->setNumArgs(numParams);
1758 break;
1760 assert(extraArgs != nullptr && numExtraArgs > 0);
1761 // VarEnv expects the extra args to be in "reverse" order
1762 // (i.e. the last extra arg has the lowest address)
1763 to = extraArgs->getExtraArg(paramId - numParams);
1765 tvDup(*from, *to);
1766 if (LIKELY(!f->byRef(paramId))) {
1767 if (to->m_type == KindOfRef) {
1768 tvUnbox(to);
1770 } else if (!(flags & InvokeIgnoreByRefErrors) &&
1771 (from->m_type != KindOfRef ||
1772 from->m_data.pref->_count == 2)) {
1773 raise_warning("Parameter %d to %s() expected to be "
1774 "a reference, value given",
1775 paramId + 1, f->fullName()->data());
1776 if (skipCufOnInvalidParams) {
1777 if (extraArgs) {
1778 int n = paramId >= numParams ? paramId - numParams + 1 : 0;
1779 ExtraArgs::deallocate(extraArgs, n);
1780 ar->m_varEnv = nullptr;
1781 paramId -= n;
1783 while (paramId >= 0) {
1784 m_stack.popTV();
1785 paramId--;
1787 m_stack.popAR();
1788 tvWriteNull(retval);
1789 return;
1795 if (m_fp) {
1796 reenterVM(retval, ar, savedSP);
1797 } else {
1798 assert(m_nestedVMs.size() == 0);
1799 enterVM(retval, ar);
1803 void VMExecutionContext::invokeFuncFew(TypedValue* retval,
1804 const Func* f,
1805 void* thisOrCls,
1806 StringData* invName,
1807 int argc, TypedValue* argv) {
1808 assert(retval);
1809 assert(f);
1810 // If this is a regular function, this_ and cls must be NULL
1811 assert(f->preClass() || !thisOrCls);
1812 // If this is a method, either this_ or cls must be non-NULL
1813 assert(!f->preClass() || thisOrCls);
1814 // If this is a static method, this_ must be NULL
1815 assert(!(f->attrs() & AttrStatic && !f->isClosureBody()) ||
1816 !ActRec::decodeThis(thisOrCls));
1817 // invName should only be non-NULL if we are calling __call or
1818 // __callStatic
1819 assert(!invName || f->name()->isame(s___call.get()) ||
1820 f->name()->isame(s___callStatic.get()));
1822 VMRegAnchor _;
1824 if (ObjectData* thiz = ActRec::decodeThis(thisOrCls)) {
1825 thiz->incRefCount();
1827 Cell* savedSP = m_stack.top();
1828 if (argc > kStackCheckReenterPadding - kNumActRecCells) {
1829 checkStack(m_stack, f);
1831 ActRec* ar = m_stack.allocA();
1832 ar->m_soff = 0;
1833 ar->m_savedRbp = 0;
1834 ar->m_func = f;
1835 ar->m_this = (ObjectData*)thisOrCls;
1836 ar->initNumArgs(argc);
1837 if (UNLIKELY(invName != nullptr)) {
1838 ar->setInvName(invName);
1839 } else {
1840 ar->m_varEnv = nullptr;
1843 #ifdef HPHP_TRACE
1844 if (m_fp == nullptr) {
1845 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
1846 f->name()->data(), ar);
1847 } else {
1848 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
1849 f->name()->data(), m_pc, ar,
1850 m_fp->m_func ? m_fp->m_func->name()->data() : "unknownBuiltin", m_fp);
1852 #endif
1854 for (int i = 0; i < argc; i++) {
1855 *m_stack.allocTV() = *argv++;
1858 if (m_fp) {
1859 reenterVM(retval, ar, savedSP);
1860 } else {
1861 assert(m_nestedVMs.size() == 0);
1862 enterVM(retval, ar);
1866 void VMExecutionContext::invokeContFunc(const Func* f,
1867 ObjectData* this_,
1868 TypedValue* param /* = NULL */) {
1869 assert(f);
1870 assert(this_);
1872 EagerVMRegAnchor _;
1874 this_->incRefCount();
1876 Cell* savedSP = m_stack.top();
1878 // no need to check stack due to ReenterPadding
1879 assert(kStackCheckReenterPadding - kNumActRecCells >= 1);
1881 ActRec* ar = m_stack.allocA();
1882 ar->m_savedRbp = 0;
1883 ar->m_func = f;
1884 ar->m_soff = 0;
1885 ar->initNumArgs(param != nullptr ? 1 : 0);
1886 ar->setThis(this_);
1887 ar->setVarEnv(nullptr);
1889 if (param != nullptr) {
1890 tvDup(*param, *m_stack.allocTV());
1893 TypedValue retval;
1894 reenterVM(&retval, ar, savedSP);
1895 // Codegen for generator functions guarantees that they will return null
1896 assert(IS_NULL_TYPE(retval.m_type));
1899 void VMExecutionContext::invokeUnit(TypedValue* retval, Unit* unit) {
1900 Func* func = unit->getMain();
1901 invokeFunc(retval, func, null_array, nullptr, nullptr,
1902 m_globalVarEnv, nullptr, InvokePseudoMain);
1906 * Given a pointer to a VM frame, returns the previous VM frame in the call
1907 * stack. This function will also pass back by reference the previous PC (if
1908 * prevPc is non-null) and the previous SP (if prevSp is non-null).
1910 * If there is no previous VM frame, this function returns NULL and does not
1911 * set prevPc and prevSp.
1913 ActRec* VMExecutionContext::getPrevVMState(const ActRec* fp,
1914 Offset* prevPc /* = NULL */,
1915 TypedValue** prevSp /* = NULL */,
1916 bool* fromVMEntry /* = NULL */) {
1917 if (fp == nullptr) {
1918 return nullptr;
1920 ActRec* prevFp = fp->arGetSfp();
1921 if (prevFp != fp) {
1922 if (prevSp) {
1923 if (UNLIKELY(fp->m_func->isGenerator())) {
1924 *prevSp = (TypedValue*)prevFp - prevFp->m_func->numSlotsInFrame();
1925 } else {
1926 *prevSp = (TypedValue*)&fp[1];
1929 if (prevPc) *prevPc = prevFp->m_func->base() + fp->m_soff;
1930 if (fromVMEntry) *fromVMEntry = false;
1931 return prevFp;
1933 // Linear search from end of m_nestedVMs. In practice, we're probably
1934 // looking for something recently pushed.
1935 int i = m_nestedVMs.size() - 1;
1936 for (; i >= 0; --i) {
1937 if (m_nestedVMs[i].m_entryFP == fp) break;
1939 if (i == -1) return nullptr;
1940 const VMState& vmstate = m_nestedVMs[i].m_savedState;
1941 prevFp = vmstate.fp;
1942 assert(prevFp);
1943 assert(prevFp->m_func->unit());
1944 if (prevSp) *prevSp = vmstate.sp;
1945 if (prevPc) *prevPc = prevFp->m_func->unit()->offsetOf(vmstate.pc);
1946 if (fromVMEntry) *fromVMEntry = true;
1947 return prevFp;
1950 Array VMExecutionContext::debugBacktrace(bool skip /* = false */,
1951 bool withSelf /* = false */,
1952 bool withThis /* = false */,
1953 VMParserFrame*
1954 parserFrame /* = NULL */,
1955 bool ignoreArgs /* = false */,
1956 int limit /* = 0 */) {
1957 Array bt = Array::Create();
1959 // If there is a parser frame, put it at the beginning of
1960 // the backtrace
1961 if (parserFrame) {
1962 bt.append(
1963 ArrayInit(2)
1964 .set(s_file, parserFrame->filename, true)
1965 .set(s_line, parserFrame->lineNumber, true)
1966 .toVariant()
1970 VMRegAnchor _;
1971 if (!getFP()) {
1972 // If there are no VM frames, we're done
1973 return bt;
1976 int depth = 0;
1977 ActRec* fp = nullptr;
1978 Offset pc = 0;
1980 // Get the fp and pc of the top frame (possibly skipping one frame)
1982 if (skip) {
1983 fp = getPrevVMState(getFP(), &pc);
1984 if (!fp) {
1985 // We skipped over the only VM frame, we're done
1986 return bt;
1988 } else {
1989 fp = getFP();
1990 Unit *unit = getFP()->m_func->unit();
1991 assert(unit);
1992 pc = unit->offsetOf(m_pc);
1995 // Handle the top frame
1996 if (withSelf) {
1997 // Builtins don't have a file and line number
1998 if (!fp->m_func->isBuiltin()) {
1999 Unit *unit = fp->m_func->unit();
2000 assert(unit);
2001 const char* filename = unit->filepath()->data();
2002 if (fp->m_func->originalFilename()) {
2003 filename = fp->m_func->originalFilename()->data();
2005 assert(filename);
2006 Offset off = pc;
2008 ArrayInit frame(parserFrame ? 4 : 2);
2009 frame.set(s_file, filename, true);
2010 frame.set(s_line, unit->getLineNumber(off), true);
2011 if (parserFrame) {
2012 frame.set(s_function, s_include, true);
2013 frame.set(s_args, Array::Create(parserFrame->filename), true);
2015 bt.append(frame.toVariant());
2016 depth++;
2021 // Handle the subsequent VM frames
2022 Offset prevPc = 0;
2023 for (ActRec* prevFp = getPrevVMState(fp, &prevPc);
2024 fp != nullptr && (limit == 0 || depth < limit);
2025 fp = prevFp, pc = prevPc, prevFp = getPrevVMState(fp, &prevPc)) {
2026 // do not capture frame for HPHP-only functions
2027 if (fp->m_func->isNoInjection()) {
2028 continue;
2031 ArrayInit frame(7);
2033 auto const curUnit = fp->m_func->unit();
2034 auto const curOp = toOp(*curUnit->at(pc));
2035 auto const isReturning = curOp == OpRetC || curOp == OpRetV;
2037 // Builtins and generators don't have a file and line number
2038 if (prevFp && !prevFp->m_func->isBuiltin() && !fp->m_func->isGenerator()) {
2039 auto const prevUnit = prevFp->m_func->unit();
2040 auto prevFile = prevUnit->filepath();
2041 if (prevFp->m_func->originalFilename()) {
2042 prevFile = prevFp->m_func->originalFilename();
2044 assert(prevFile);
2045 frame.set(s_file, const_cast<StringData*>(prevFile), true);
2047 // In the normal method case, the "saved pc" for line number printing is
2048 // pointing at the cell conversion (Unbox/Pop) instruction, not the call
2049 // itself. For multi-line calls, this instruction is associated with the
2050 // subsequent line, which results in an off-by-n. We're subtracting one
2051 // in order to look up the line associated with the FCall/FCallArray
2052 // instruction. Exception handling and the other opcodes (ex. BoxR)
2053 // already do the right thing. The emitter associates object access with
2054 // the subsequent expression and this would be difficult to modify.
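// Illustrative sketch (assumed source layout, not from any real unit): for a
// call written as
//   $x = foo($a,    // line 10
//            $b);   // line 11
// the PopR/UnboxR that converts the result may be attributed to line 11 (or
// later), so without the one-byte pcAdjust below the frame would report the
// wrong line for the FCall itself.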
2055 auto const opAtPrevPc =
2056 toOp(*reinterpret_cast<const Opcode*>(prevUnit->at(prevPc)));
2057 Offset pcAdjust = 0;
2058 if (opAtPrevPc == OpPopR || opAtPrevPc == OpUnboxR) {
2059 pcAdjust = 1;
2061 frame.set(s_line,
2062 prevFp->m_func->unit()->getLineNumber(prevPc - pcAdjust),
2063 true);
2066 // check for include
2067 String funcname = const_cast<StringData*>(fp->m_func->name());
2068 if (fp->m_func->isGenerator()) {
2069 // retrieve the original function name from the inner continuation
2070 TypedValue* tv = frame_local(fp, 0);
2071 assert(tv->m_type == HPHP::KindOfObject);
2072 funcname = static_cast<c_Continuation*>(
2073 tv->m_data.pobj)->t_getorigfuncname();
2076 if (fp->m_func->isClosureBody()) {
2077 static StringData* s_closure_label =
2078 StringData::GetStaticString("{closure}");
2079 funcname = s_closure_label;
2082 // check for pseudomain
2083 if (funcname->empty()) {
2084 if (!prevFp) continue;
2085 funcname = s_include;
2088 frame.set(s_function, funcname, true);
2090 if (!funcname.same(s_include)) {
2091 // Closures have an m_this but they aren't in object context
2092 Class* ctx = arGetContextClass(fp);
2093 if (ctx != nullptr && !fp->m_func->isClosureBody()) {
2094 frame.set(s_class, ctx->name()->data(), true);
2095 if (fp->hasThis() && !isReturning) {
2096 if (withThis) {
2097 frame.set(s_object, Object(fp->getThis()), true);
2099 frame.set(s_type, "->", true);
2100 } else {
2101 frame.set(s_type, "::", true);
2106 Array args = Array::Create();
2107 if (ignoreArgs) {
2108 // do nothing
2109 } else if (funcname.same(s_include)) {
2110 if (depth) {
2111 args.append(const_cast<StringData*>(curUnit->filepath()));
2112 frame.set(s_args, args, true);
2114 } else if (!RuntimeOption::EnableArgsInBacktraces || isReturning) {
2115 // Provide an empty 'args' array to be consistent with hphpc
2116 frame.set(s_args, args, true);
2117 } else {
2118 int nparams = fp->m_func->numParams();
2119 int nargs = fp->numArgs();
2120 /* builtin extra args are not stored in varenv */
2121 if (nargs <= nparams) {
2122 for (int i = 0; i < nargs; i++) {
2123 TypedValue *arg = frame_local(fp, i);
2124 args.append(tvAsVariant(arg));
2126 } else {
2127 int i;
2128 for (i = 0; i < nparams; i++) {
2129 TypedValue *arg = frame_local(fp, i);
2130 args.append(tvAsVariant(arg));
2132 for (; i < nargs; i++) {
2133 TypedValue *arg = fp->getExtraArg(i - nparams);
2134 args.append(tvAsVariant(arg));
2137 frame.set(s_args, args, true);
2140 bt.append(frame.toVariant());
2141 depth++;
2143 return bt;
2146 MethodInfoVM::~MethodInfoVM() {
2147 for (std::vector<const ClassInfo::ParameterInfo*>::iterator it =
2148 parameters.begin(); it != parameters.end(); ++it) {
2149 if ((*it)->value != nullptr) {
2150 free((void*)(*it)->value);
2155 ClassInfoVM::~ClassInfoVM() {
2156 destroyMembers(m_methodsVec);
2157 destroyMapValues(m_properties);
2158 destroyMapValues(m_constants);
2161 Array VMExecutionContext::getUserFunctionsInfo() {
2162 // Return an array of all user-defined function names. This method is used to
2163 // support get_defined_functions().
2164 return Unit::getUserFunctions();
2167 Array VMExecutionContext::getConstantsInfo() {
2168 // Return an array of all defined constant:value pairs. This method is used
2169 // to support get_defined_constants().
2170 return Array::Create();
2173 const ClassInfo::MethodInfo* VMExecutionContext::findFunctionInfo(
2174 CStrRef name) {
2175 StringIMap<AtomicSmartPtr<MethodInfoVM> >::iterator it =
2176 m_functionInfos.find(name);
2177 if (it == m_functionInfos.end()) {
2178 Func* func = Unit::loadFunc(name.get());
2179 if (func == nullptr || func->builtinFuncPtr()) {
2180 return nullptr;
2182 AtomicSmartPtr<MethodInfoVM> &m = m_functionInfos[name];
2183 m = new MethodInfoVM();
2184 func->getFuncInfo(m.get());
2185 return m.get();
2186 } else {
2187 return it->second.get();
2191 const ClassInfo* VMExecutionContext::findClassInfo(CStrRef name) {
2192 if (name->empty()) return nullptr;
2193 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2194 m_classInfos.find(name);
2195 if (it == m_classInfos.end()) {
2196 Class* cls = Unit::lookupClass(name.get());
2197 if (cls == nullptr) return nullptr;
2198 if (cls->clsInfo()) return cls->clsInfo();
2199 if (cls->attrs() & (AttrInterface | AttrTrait)) {
2200 // If the specified name matches with something that is not formally
2201 // a class, return NULL
2202 return nullptr;
2204 AtomicSmartPtr<ClassInfoVM> &c = m_classInfos[name];
2205 c = new ClassInfoVM();
2206 cls->getClassInfo(c.get());
2207 return c.get();
2208 } else {
2209 return it->second.get();
2213 const ClassInfo* VMExecutionContext::findInterfaceInfo(CStrRef name) {
2214 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2215 m_interfaceInfos.find(name);
2216 if (it == m_interfaceInfos.end()) {
2217 Class* cls = Unit::lookupClass(name.get());
2218 if (cls == nullptr) return nullptr;
2219 if (cls->clsInfo()) return cls->clsInfo();
2220 if (!(cls->attrs() & AttrInterface)) {
2221 // If the specified name matches with something that is not formally
2222 // an interface, return NULL
2223 return nullptr;
2225 AtomicSmartPtr<ClassInfoVM> &c = m_interfaceInfos[name];
2226 c = new ClassInfoVM();
2227 cls->getClassInfo(c.get());
2228 return c.get();
2229 } else {
2230 return it->second.get();
2234 const ClassInfo* VMExecutionContext::findTraitInfo(CStrRef name) {
2235 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2236 m_traitInfos.find(name);
2237 if (it != m_traitInfos.end()) {
2238 return it->second.get();
2240 Class* cls = Unit::lookupClass(name.get());
2241 if (cls == nullptr) return nullptr;
2242 if (cls->clsInfo()) return cls->clsInfo();
2243 if (!(cls->attrs() & AttrTrait)) {
2244 return nullptr;
2246 AtomicSmartPtr<ClassInfoVM> &classInfo = m_traitInfos[name];
2247 classInfo = new ClassInfoVM();
2248 cls->getClassInfo(classInfo.get());
2249 return classInfo.get();
2252 const ClassInfo::ConstantInfo* VMExecutionContext::findConstantInfo(
2253 CStrRef name) {
2254 TypedValue* tv = Unit::lookupCns(name.get());
2255 if (tv == nullptr) {
2256 return nullptr;
2258 ConstInfoMap::const_iterator it = m_constInfo.find(name.get());
2259 if (it != m_constInfo.end()) {
2260 return it->second;
2262 StringData* key = StringData::GetStaticString(name.get());
2263 ClassInfo::ConstantInfo* ci = new ClassInfo::ConstantInfo();
2264 ci->name = *(const String*)&key;
2265 ci->valueLen = 0;
2266 ci->valueText = "";
2267 ci->setValue(tvAsCVarRef(tv));
2268 m_constInfo[key] = ci;
2269 return ci;
2272 HPHP::Eval::PhpFile* VMExecutionContext::lookupPhpFile(StringData* path,
2273 const char* currentDir,
2274 bool* initial_opt) {
2275 bool init;
2276 bool &initial = initial_opt ? *initial_opt : init;
2277 initial = true;
2279 struct stat s;
2280 String spath = Eval::resolveVmInclude(path, currentDir, &s);
2281 if (spath.isNull()) return nullptr;
2283 // Check if this file has already been included.
2284 EvaledFilesMap::const_iterator it = m_evaledFiles.find(spath.get());
2285 HPHP::Eval::PhpFile* efile = nullptr;
2286 if (it != m_evaledFiles.end()) {
2287 // We found it! Return the unit.
2288 efile = it->second;
2289 initial = false;
2290 return efile;
2292 // We didn't find it, so try the realpath.
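// (Illustrative: if "lib/a.php" is a symlink into "/srv/shared/a.php", both
// spellings resolve to the same realpath, so a second include via the other
// spelling reuses the PhpFile that was already checked out instead of
// parsing the file again.)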
2293 bool alreadyResolved =
2294 RuntimeOption::RepoAuthoritative ||
2295 (!RuntimeOption::CheckSymLink && (spath[0] == '/'));
2296 bool hasRealpath = false;
2297 String rpath;
2298 if (!alreadyResolved) {
2299 std::string rp = StatCache::realpath(spath.data());
2300 if (rp.size() != 0) {
2301 rpath = NEW(StringData)(rp.data(), rp.size(), CopyString);
2302 if (!rpath.same(spath)) {
2303 hasRealpath = true;
2304 it = m_evaledFiles.find(rpath.get());
2305 if (it != m_evaledFiles.end()) {
2306 // We found it! Update the mapping for spath and
2307 // return the unit.
2308 efile = it->second;
2309 m_evaledFiles[spath.get()] = efile;
2310 spath.get()->incRefCount();
2311 initial = false;
2312 return efile;
2317 // This file hasn't been included yet, so we need to parse the file
2318 efile = HPHP::Eval::FileRepository::checkoutFile(
2319 hasRealpath ? rpath.get() : spath.get(), s);
2320 if (efile && initial_opt) {
2321 // if initial_opt is not set, this shouldn't be recorded as a
2322 // per-request fetch of the file.
2323 if (Transl::TargetCache::testAndSetBit(efile->getId())) {
2324 initial = false;
2326 // if parsing was successful, update the mappings for spath and
2327 // rpath (if it exists).
2328 m_evaledFiles[spath.get()] = efile;
2329 spath.get()->incRefCount();
2330 // Don't incRef efile; checkoutFile() already counted it.
2331 if (hasRealpath) {
2332 m_evaledFiles[rpath.get()] = efile;
2333 rpath.get()->incRefCount();
2335 DEBUGGER_ATTACHED_ONLY(phpDebuggerFileLoadHook(efile));
2337 return efile;
2340 Unit* VMExecutionContext::evalInclude(StringData* path,
2341 const StringData* curUnitFilePath,
2342 bool* initial) {
2343 namespace fs = boost::filesystem;
2344 HPHP::Eval::PhpFile* efile = nullptr;
2345 if (curUnitFilePath) {
2346 fs::path currentUnit(curUnitFilePath->data());
2347 fs::path currentDir(currentUnit.branch_path());
2348 efile = lookupPhpFile(path, currentDir.string().c_str(), initial);
2349 } else {
2350 efile = lookupPhpFile(path, "", initial);
2352 if (efile) {
2353 return efile->unit();
2355 return nullptr;
2358 HPHP::Unit* VMExecutionContext::evalIncludeRoot(
2359 StringData* path, InclOpFlags flags, bool* initial) {
2360 HPHP::Eval::PhpFile* efile = lookupIncludeRoot(path, flags, initial);
2361 return efile ? efile->unit() : 0;
2364 HPHP::Eval::PhpFile* VMExecutionContext::lookupIncludeRoot(StringData* path,
2365 InclOpFlags flags,
2366 bool* initial,
2367 Unit* unit) {
2368 String absPath;
2369 if ((flags & InclOpRelative)) {
2370 namespace fs = boost::filesystem;
2371 if (!unit) unit = getFP()->m_func->unit();
2372 fs::path currentUnit(unit->filepath()->data());
2373 fs::path currentDir(currentUnit.branch_path());
2374 absPath = currentDir.string() + '/';
2375 TRACE(2, "lookupIncludeRoot(%s): relative -> %s\n",
2376 path->data(),
2377 absPath->data());
2378 } else {
2379 assert(flags & InclOpDocRoot);
2380 absPath = SourceRootInfo::GetCurrentPhpRoot();
2381 TRACE(2, "lookupIncludeRoot(%s): docRoot -> %s\n",
2382 path->data(),
2383 absPath->data());
2386 absPath += StrNR(path);
2388 EvaledFilesMap::const_iterator it = m_evaledFiles.find(absPath.get());
2389 if (it != m_evaledFiles.end()) {
2390 if (initial) *initial = false;
2391 return it->second;
2394 return lookupPhpFile(absPath.get(), "", initial);
2398 Instantiate hoistable classes and functions.
2399 If there is any more work left to do, set up a
2400 new frame ready to execute the pseudomain.
2402 Return true iff the pseudomain needs to be executed.
2404 bool VMExecutionContext::evalUnit(Unit* unit, PC& pc, int funcType) {
2405 m_pc = pc;
2406 unit->merge();
2407 if (unit->isMergeOnly()) {
2408 Stats::inc(Stats::PseudoMain_Skipped);
2409 *m_stack.allocTV() = *unit->getMainReturn();
2410 return false;
2412 Stats::inc(Stats::PseudoMain_Executed);
2415 ActRec* ar = m_stack.allocA();
2416 assert((uintptr_t)&ar->m_func < (uintptr_t)&ar->m_r);
2417 Class* cls = curClass();
2418 if (m_fp->hasThis()) {
2419 ObjectData *this_ = m_fp->getThis();
2420 this_->incRefCount();
2421 ar->setThis(this_);
2422 } else if (m_fp->hasClass()) {
2423 ar->setClass(m_fp->getClass());
2424 } else {
2425 ar->setThis(nullptr);
2427 Func* func = unit->getMain(cls);
2428 assert(!func->info());
2429 assert(!func->isGenerator());
2430 ar->m_func = func;
2431 ar->initNumArgs(0);
2432 assert(getFP());
2433 assert(!m_fp->hasInvName());
2434 arSetSfp(ar, m_fp);
2435 ar->m_soff = uintptr_t(m_fp->m_func->unit()->offsetOf(pc) -
2436 m_fp->m_func->base());
2437 ar->m_savedRip =
2438 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
2439 assert(isReturnHelper(ar->m_savedRip));
2440 pushLocalsAndIterators(func);
2441 if (!m_fp->hasVarEnv()) {
2442 m_fp->setVarEnv(VarEnv::createLocalOnStack(m_fp));
2444 ar->m_varEnv = m_fp->m_varEnv;
2445 ar->m_varEnv->attach(ar);
2447 m_fp = ar;
2448 pc = func->getEntry();
2449 SYNC();
2450 bool ret = EventHook::FunctionEnter(m_fp, funcType);
2451 pc = m_pc;
2452 return ret;
2455 StaticString
2456 s_php_namespace("<?php namespace "),
2457 s_curly_return(" { return "),
2458 s_semicolon_curly("; }"),
2459 s_php_return("<?php return "),
2460 s_semicolon(";");
2461 CVarRef VMExecutionContext::getEvaledArg(const StringData* val,
2462 CStrRef namespacedName) {
2463 CStrRef key = *(String*)&val;
2465 if (m_evaledArgs.get()) {
2466 CVarRef arg = m_evaledArgs.get()->get(key);
2467 if (&arg != &null_variant) return arg;
2470 String code;
2471 int pos = namespacedName.rfind('\\');
2472 if (pos != -1) {
2473 auto ns = namespacedName.substr(0, pos);
2474 code = s_php_namespace + ns + s_curly_return + key + s_semicolon_curly;
2475 } else {
2476 code = s_php_return + key + s_semicolon;
2478 Unit* unit = compileEvalString(code.get());
2479 assert(unit != nullptr);
2480 Variant v;
2481 // Default arg values are not currently allowed to depend on class context.
2482 g_vmContext->invokeFunc((TypedValue*)&v, unit->getMain(),
2483 null_array, nullptr, nullptr, nullptr, nullptr,
2484 InvokePseudoMain);
2485 Variant &lv = m_evaledArgs.lvalAt(key, AccessFlags::Key);
2486 lv = v;
2487 return lv;
2491 * Helper for function entry, including pseudo-main entry.
2493 void
2494 VMExecutionContext::pushLocalsAndIterators(const Func* func,
2495 int nparams /*= 0*/) {
2496 // Push locals.
2497 for (int i = nparams; i < func->numLocals(); i++) {
2498 m_stack.pushUninit();
2500 // Push iterators.
2501 for (int i = 0; i < func->numIterators(); i++) {
2502 m_stack.allocI();
2506 void VMExecutionContext::enqueueSharedVar(SharedVariant* svar) {
2507 m_freedSvars.push_back(svar);
2510 class FreedSVars : public Treadmill::WorkItem {
2511 SVarVector m_svars;
2512 public:
2513 explicit FreedSVars(SVarVector&& svars) : m_svars(std::move(svars)) {}
2514 virtual void operator()() {
2515 for (auto it = m_svars.begin(); it != m_svars.end(); it++) {
2516 delete *it;
2521 void VMExecutionContext::treadmillSharedVars() {
2522 Treadmill::WorkItem::enqueue(new FreedSVars(std::move(m_freedSvars)));
2525 void VMExecutionContext::destructObjects() {
2526 if (UNLIKELY(RuntimeOption::EnableObjDestructCall)) {
2527 while (!m_liveBCObjs.empty()) {
2528 ObjectData* obj = *m_liveBCObjs.begin();
2529 obj->destruct(); // Let the instance remove the node.
2531 m_liveBCObjs.clear();
2535 // Evaled units have a footprint in the TC and translation metadata. The
2536 // applications we care about tend to have few, short, stereotyped evals,
2537 // where the same code keeps getting eval'ed over and over again; so we
2538 // keep around units for each eval'ed string, so that the TC space isn't
2539 // wasted on each eval.
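// Illustrative example (hypothetical user code): a request that repeatedly
// runs eval('return $row["id"];') inside a loop compiles that string once;
// every later eval of the identical string hits s_evaledUnits and reuses the
// same Unit, so no additional TC space or translation metadata is spent.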
2540 typedef RankedCHM<StringData*, HPHP::Unit*,
2541 StringDataHashCompare,
2542 RankEvaledUnits> EvaledUnitsMap;
2543 static EvaledUnitsMap s_evaledUnits;
2544 Unit* VMExecutionContext::compileEvalString(StringData* code) {
2545 EvaledUnitsMap::accessor acc;
2546 // Promote this to a static string; otherwise it may get swept
2547 // across requests.
2548 code = StringData::GetStaticString(code);
2549 if (s_evaledUnits.insert(acc, code)) {
2550 acc->second = compile_string(code->data(), code->size());
2552 return acc->second;
2555 CStrRef VMExecutionContext::createFunction(CStrRef args, CStrRef code) {
2556 VMRegAnchor _;
2557 // It doesn't matter if there's a user function named __lambda_func; we only
2558 // use this name during parsing, and then change it to an impossible name
2559 // with a NUL byte before we merge it into the request's func map. This also
2560 // has the bonus feature that the value of __FUNCTION__ inside the created
2561 // function will match Zend. (Note: Zend will actually fatal if there's a
2562 // user function named __lambda_func when you call create_function. Huzzah!)
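// Rough flow (illustrative): create_function('$a', 'return $a;') is wrapped
// as "<?php function __lambda_func($a) {return $a;}\n", compiled, and the
// function is then renamed to an uncallable name of the form "\0lambda_N"
// before the unit is merged, so no user-visible name can ever collide with it.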
2563 static StringData* oldName = StringData::GetStaticString("__lambda_func");
2564 std::ostringstream codeStr;
2565 codeStr << "<?php function " << oldName->data()
2566 << "(" << args.data() << ") {"
2567 << code.data() << "}\n";
2568 StringData* evalCode = StringData::GetStaticString(codeStr.str());
2569 Unit* unit = compile_string(evalCode->data(), evalCode->size());
2570 // Move the function to a different name.
2571 std::ostringstream newNameStr;
2572 newNameStr << '\0' << "lambda_" << ++m_lambdaCounter;
2573 StringData* newName = StringData::GetStaticString(newNameStr.str());
2574 unit->renameFunc(oldName, newName);
2575 m_createdFuncs.push_back(unit);
2576 unit->merge();
2578 // Technically we shouldn't have to eval the unit right now (it'll execute
2579 // the pseudo-main, which should be empty) and could get away with just
2580 // mergeFuncs. However, Zend does it this way, as proven by the fact that you
2581 // can inject code into the evaled unit's pseudo-main:
2583 // create_function('', '} echo "hi"; if (0) {');
2585 // We have to eval now to emulate this behavior.
2586 TypedValue retval;
2587 invokeFunc(&retval, unit->getMain(), null_array,
2588 nullptr, nullptr, nullptr, nullptr,
2589 InvokePseudoMain);
2591 // __lambda_func will be the only hoistable function.
2592 // Any functions or closures defined in it will not be hoistable.
2593 Func* lambda = unit->firstHoistable();
2594 return lambda->nameRef();
2597 void VMExecutionContext::evalPHPDebugger(TypedValue* retval, StringData *code,
2598 int frame) {
2599 assert(retval);
2600 // The code has "<?php" prepended already
2601 Unit* unit = compileEvalString(code);
2602 if (unit == nullptr) {
2603 raise_error("Syntax error");
2604 tvWriteNull(retval);
2605 return;
2608 VarEnv *varEnv = nullptr;
2609 ActRec *fp = getFP();
2610 ActRec *cfpSave = nullptr;
2611 if (fp) {
2612 for (; frame > 0; --frame) {
2613 ActRec* prevFp = getPrevVMState(fp);
2614 if (!prevFp) {
2615 // To be safe in case we failed to get prevFp. This would mean we've
2616 // been asked to eval in a frame which is beyond the top of the stack.
2617 // This suggests the debugger client has made an error.
2618 break;
2620 fp = prevFp;
2622 if (!fp->hasVarEnv()) {
2623 fp->setVarEnv(VarEnv::createLocalOnHeap(fp));
2625 varEnv = fp->m_varEnv;
2626 cfpSave = varEnv->getCfp();
2628 ObjectData *this_ = nullptr;
2629 // NB: the ActRec and function within the AR may have different classes. The
2630 // class in the ActRec is the type used when invoking the function (i.e.,
2631 // Derived in Derived::Foo()) while the class obtained from the function is
2632 // the type that declared the function Foo, which may be Base. We need both:
2633 // the class to match any object that this function may have been invoked on,
2634 // and the class of the function in which execution is stopped.
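// Illustrative case (assumed class names): execution is stopped inside
// Base::foo() that was invoked as $d->foo() with $d instanceof Derived.
// The ActRec gives us Derived (via $this or frameClass), while
// fp->m_func->cls() gives Base; the latter is what is passed to
// unit->getMain() below as functionClass.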
2635 Class *frameClass = nullptr;
2636 Class *functionClass = nullptr;
2637 if (fp) {
2638 if (fp->hasThis()) {
2639 this_ = fp->getThis();
2640 } else if (fp->hasClass()) {
2641 frameClass = fp->getClass();
2643 functionClass = fp->m_func->cls();
2644 phpDebuggerEvalHook(fp->m_func);
2647 const static StaticString s_cppException("Hit an exception");
2648 const static StaticString s_phpException("Hit a php exception");
2649 const static StaticString s_exit("Hit exit");
2650 const static StaticString s_fatal("Hit fatal");
2651 try {
2652 // Invoke the given PHP, possibly specialized to match the type of the
2653 // current function on the stack, optionally passing a this pointer or
2654 // class used to execute the current function.
2655 invokeFunc(retval, unit->getMain(functionClass), null_array,
2656 this_, frameClass, varEnv, nullptr, InvokePseudoMain);
2657 } catch (FatalErrorException &e) {
2658 g_vmContext->write(s_fatal);
2659 g_vmContext->write(" : ");
2660 g_vmContext->write(e.getMessage().c_str());
2661 g_vmContext->write("\n");
2662 g_vmContext->write(ExtendedLogger::StringOfStackTrace(e.getBackTrace()));
2663 } catch (ExitException &e) {
2664 g_vmContext->write(s_exit.data());
2665 g_vmContext->write(" : ");
2666 std::ostringstream os;
2667 os << ExitException::ExitCode;
2668 g_vmContext->write(os.str());
2669 } catch (Eval::DebuggerException &e) {
2670 if (varEnv) {
2671 varEnv->setCfp(cfpSave);
2673 throw;
2674 } catch (Exception &e) {
2675 g_vmContext->write(s_cppException.data());
2676 g_vmContext->write(" : ");
2677 g_vmContext->write(e.getMessage().c_str());
2678 ExtendedException* ee = dynamic_cast<ExtendedException*>(&e);
2679 if (ee) {
2680 g_vmContext->write("\n");
2681 g_vmContext->write(
2682 ExtendedLogger::StringOfStackTrace(ee->getBackTrace()));
2684 } catch (Object &e) {
2685 g_vmContext->write(s_phpException.data());
2686 g_vmContext->write(" : ");
2687 g_vmContext->write(e->t___tostring().data());
2688 } catch (...) {
2689 g_vmContext->write(s_cppException.data());
2692 if (varEnv) {
2693 // The debugger eval frame may have attached to the VarEnv from a
2694 // frame that was not the top frame, so we need to manually set
2695 // cfp back to what it was before
2696 varEnv->setCfp(cfpSave);
2700 void VMExecutionContext::enterDebuggerDummyEnv() {
2701 static Unit* s_debuggerDummy = compile_string("<?php?>", 7);
2702 // Ensure that the VM stack is completely empty (m_fp should be null)
2703 // and that we're not in a nested VM (reentrancy)
2704 assert(getFP() == nullptr);
2705 assert(m_nestedVMs.size() == 0);
2706 assert(m_nesting == 0);
2707 assert(m_stack.count() == 0);
2708 ActRec* ar = m_stack.allocA();
2709 ar->m_func = s_debuggerDummy->getMain();
2710 ar->setThis(nullptr);
2711 ar->m_soff = 0;
2712 ar->m_savedRbp = 0;
2713 ar->m_savedRip = reinterpret_cast<uintptr_t>(tx()->getCallToExit());
2714 assert(isReturnHelper(ar->m_savedRip));
2715 m_fp = ar;
2716 m_pc = s_debuggerDummy->entry();
2717 m_firstAR = ar;
2718 m_fp->setVarEnv(m_globalVarEnv);
2719 m_globalVarEnv->attach(m_fp);
2722 void VMExecutionContext::exitDebuggerDummyEnv() {
2723 assert(m_globalVarEnv);
2724 // Ensure that m_fp is valid
2725 assert(getFP() != nullptr);
2726 // Ensure that m_fp points to the only frame on the call stack.
2727 // In other words, make sure there are no VM frames directly below
2728 // this one and that we are not in a nested VM (reentrancy)
2729 assert(m_fp->arGetSfp() == m_fp);
2730 assert(m_nestedVMs.size() == 0);
2731 assert(m_nesting == 0);
2732 // Tear down the frame we erected in enterDebuggerDummyEnv()
2733 const Func* func = m_fp->m_func;
2734 try {
2735 frame_free_locals_inl_no_hook<true>(m_fp, func->numLocals());
2736 } catch (...) {}
2737 m_stack.ndiscard(func->numSlotsInFrame());
2738 m_stack.discardAR();
2739 // After tearing down this frame, the VM stack should be completely empty
2740 assert(m_stack.count() == 0);
2741 m_fp = nullptr;
2742 m_pc = nullptr;
2745 // Identifies the set of return helpers that we may set m_savedRip to in an
2746 // ActRec.
2747 bool VMExecutionContext::isReturnHelper(uintptr_t address) {
2748 auto tcAddr = reinterpret_cast<Transl::TCA>(address);
2749 return ((tcAddr == tx()->getRetFromInterpretedFrame()) ||
2750 (tcAddr == tx()->getRetFromInterpretedGeneratorFrame()) ||
2751 (tcAddr == tx()->getCallToExit()));
2754 // Walk the stack and find any return address to jitted code and bash it to
2755 // the appropriate RetFromInterpreted*Frame helper. This ensures that we don't
2756 // return into jitted code and gives the system the proper chance to interpret
2757 // blacklisted tracelets.
2758 void VMExecutionContext::preventReturnsToTC() {
2759 assert(isDebuggerAttached());
2760 if (RuntimeOption::EvalJit) {
2761 ActRec *ar = getFP();
2762 while (ar) {
2763 if (!isReturnHelper(ar->m_savedRip) &&
2764 (tx()->isValidCodeAddress((Transl::TCA)ar->m_savedRip))) {
2765 TRACE_RB(2, "Replace RIP in fp %p, savedRip 0x%" PRIx64 ", "
2766 "func %s\n", ar, ar->m_savedRip,
2767 ar->m_func->fullName()->data());
2768 if (ar->m_func->isGenerator()) {
2769 ar->m_savedRip =
2770 reinterpret_cast<uintptr_t>(
2771 tx()->getRetFromInterpretedGeneratorFrame());
2772 } else {
2773 ar->m_savedRip =
2774 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
2776 assert(isReturnHelper(ar->m_savedRip));
2778 ar = getPrevVMState(ar);
2783 static inline StringData* lookup_name(TypedValue* key) {
2784 return prepareKey(key);
2787 static inline void lookup_var(ActRec* fp,
2788 StringData*& name,
2789 TypedValue* key,
2790 TypedValue*& val) {
2791 name = lookup_name(key);
2792 const Func* func = fp->m_func;
2793 Id id = func->lookupVarId(name);
2794 if (id != kInvalidId) {
2795 val = frame_local(fp, id);
2796 } else {
2797 assert(!fp->hasInvName());
2798 if (fp->hasVarEnv()) {
2799 val = fp->m_varEnv->lookup(name);
2800 } else {
2801 val = nullptr;
2806 static inline void lookupd_var(ActRec* fp,
2807 StringData*& name,
2808 TypedValue* key,
2809 TypedValue*& val) {
2810 name = lookup_name(key);
2811 const Func* func = fp->m_func;
2812 Id id = func->lookupVarId(name);
2813 if (id != kInvalidId) {
2814 val = frame_local(fp, id);
2815 } else {
2816 assert(!fp->hasInvName());
2817 if (!fp->hasVarEnv()) {
2818 fp->setVarEnv(VarEnv::createLocalOnStack(fp));
2820 val = fp->m_varEnv->lookup(name);
2821 if (val == nullptr) {
2822 TypedValue tv;
2823 tvWriteNull(&tv);
2824 fp->m_varEnv->set(name, &tv);
2825 val = fp->m_varEnv->lookup(name);
2830 static inline void lookup_gbl(ActRec* fp,
2831 StringData*& name,
2832 TypedValue* key,
2833 TypedValue*& val) {
2834 name = lookup_name(key);
2835 assert(g_vmContext->m_globalVarEnv);
2836 val = g_vmContext->m_globalVarEnv->lookup(name);
2839 static inline void lookupd_gbl(ActRec* fp,
2840 StringData*& name,
2841 TypedValue* key,
2842 TypedValue*& val) {
2843 name = lookup_name(key);
2844 assert(g_vmContext->m_globalVarEnv);
2845 VarEnv* varEnv = g_vmContext->m_globalVarEnv;
2846 val = varEnv->lookup(name);
2847 if (val == nullptr) {
2848 TypedValue tv;
2849 tvWriteNull(&tv);
2850 varEnv->set(name, &tv);
2851 val = varEnv->lookup(name);
2855 static inline void lookup_sprop(ActRec* fp,
2856 TypedValue* clsRef,
2857 StringData*& name,
2858 TypedValue* key,
2859 TypedValue*& val,
2860 bool& visible,
2861 bool& accessible) {
2862 assert(clsRef->m_type == KindOfClass);
2863 name = lookup_name(key);
2864 Class* ctx = arGetContextClass(fp);
2865 val = clsRef->m_data.pcls->getSProp(ctx, name, visible, accessible);
2868 static inline void lookupClsRef(TypedValue* input,
2869 TypedValue* output,
2870 bool decRef = false) {
2871 const Class* class_ = nullptr;
2872 if (IS_STRING_TYPE(input->m_type)) {
2873 class_ = Unit::loadClass(input->m_data.pstr);
2874 if (class_ == nullptr) {
2875 output->m_type = KindOfNull;
2876 raise_error(Strings::UNKNOWN_CLASS, input->m_data.pstr->data());
2878 } else if (input->m_type == KindOfObject) {
2879 class_ = input->m_data.pobj->getVMClass();
2880 } else {
2881 output->m_type = KindOfNull;
2882 raise_error("Cls: Expected string or object");
2884 if (decRef) {
2885 tvRefcountedDecRef(input);
2887 output->m_data.pcls = const_cast<Class*>(class_);
2888 output->m_type = KindOfClass;
2891 static UNUSED int innerCount(const TypedValue* tv) {
2892 if (IS_REFCOUNTED_TYPE(tv->m_type)) {
2893 // We're using pref here arbitrarily; any refcounted union member works.
2894 return tv->m_data.pref->_count;
2896 return -1;
2899 static inline void ratchetRefs(TypedValue*& result, TypedValue& tvRef,
2900 TypedValue& tvRef2) {
2901 TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
2902 result, result->m_type, innerCount(result),
2903 &tvRef, tvRef.m_type, innerCount(&tvRef),
2904 &tvRef2, tvRef2.m_type, innerCount(&tvRef2));
2905 // Due to complications associated with ArrayAccess, it is possible to acquire
2906 // a reference as a side effect of vector operation processing. Such a
2907 // reference must be retained until after the next iteration is complete.
2908 // Therefore, move the reference from tvRef to tvRef2, so that the reference
2909 // will be released one iteration later. But only do this if tvRef was used in
2910 // this iteration; otherwise we may wipe out the last reference to something
2911 // that we need to stay alive until the next iteration.
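// Rough picture (illustrative): while processing a vector like $c[0][1]
// where $c implements ArrayAccess, the value returned by the first
// offsetGet() is parked in tvRef so it outlives the next member lookup;
// the ratchet then shifts it into tvRef2, releasing it only one iteration
// later.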
2912 if (tvRef.m_type != KindOfUninit) {
2913 if (IS_REFCOUNTED_TYPE(tvRef2.m_type)) {
2914 tvDecRef(&tvRef2);
2915 TRACE(5, "Ratchet: decref tvref2\n");
2916 tvWriteUninit(&tvRef2);
2919 memcpy(&tvRef2, &tvRef, sizeof(TypedValue));
2920 tvWriteUninit(&tvRef);
2921 // Update result to point to relocated reference. This can be done
2922 // unconditionally here because we maintain the invariant throughout that
2923 // either tvRef is KindOfUninit, or tvRef contains a valid object that
2924 // result points to.
2925 assert(result == &tvRef);
2926 result = &tvRef2;
2930 #define DECLARE_MEMBERHELPER_ARGS \
2931 unsigned ndiscard; \
2932 TypedValue* base; \
2933 TypedValue tvScratch; \
2934 TypedValue tvLiteral; \
2935 Variant tvRef; \
2936 Variant tvRef2; \
2937 MemberCode mcode = MEL; \
2938 TypedValue* curMember = 0;
2939 #define DECLARE_SETHELPER_ARGS DECLARE_MEMBERHELPER_ARGS
2940 #define DECLARE_GETHELPER_ARGS \
2941 DECLARE_MEMBERHELPER_ARGS \
2942 TypedValue* tvRet;
2944 #define MEMBERHELPERPRE_ARGS \
2945 pc, ndiscard, base, tvScratch, tvLiteral, \
2946 *tvRef.asTypedValue(), *tvRef2.asTypedValue(), mcode, curMember
2948 #define MEMBERHELPERPRE_OUT \
2949 pc, ndiscard, base, tvScratch, tvLiteral, \
2950 tvRef, tvRef2, mcode, curMember
2952 // The following arguments are outputs:
2953 // pc: bytecode instruction after the vector instruction
2954 // ndiscard: number of stack elements to discard
2955 // base: ultimate result of the vector-get
2956 // tvScratch: temporary result storage
2957 // tvRef: temporary result storage
2958 // tvRef2: temporary result storage
2959 // mcode: output MemberCode for the last member if LeaveLast
2960 // curMember: output last member value if LeaveLast; undefined
2961 // if the last mcode == MW
2963 // If saveResult is true, then upon completion of getHelperPre(),
2964 // tvScratch contains a reference to the result (a duplicate of what
2965 // base refers to). getHelperPost<true>(...) then saves the result
2966 // to its final location.
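// Illustrative member vector (hypothetical expression): for $a[3]->prop the
// immediate vector carries a base location code (LL for the local $a)
// followed by the member codes MEI 3 and MPT "prop"; memberHelperPre walks
// these one at a time, leaving the final element/property in 'base'.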
2967 template <bool warn,
2968 bool saveResult,
2969 VMExecutionContext::VectorLeaveCode mleave>
2970 inline void OPTBLD_INLINE VMExecutionContext::getHelperPre(
2971 PC& pc,
2972 unsigned& ndiscard,
2973 TypedValue*& base,
2974 TypedValue& tvScratch,
2975 TypedValue& tvLiteral,
2976 TypedValue& tvRef,
2977 TypedValue& tvRef2,
2978 MemberCode& mcode,
2979 TypedValue*& curMember) {
2980 memberHelperPre<false, warn, false, false,
2981 false, 0, mleave, saveResult>(MEMBERHELPERPRE_OUT);
2984 #define GETHELPERPOST_ARGS ndiscard, tvRet, tvScratch, tvRef, tvRef2
2985 template <bool saveResult>
2986 inline void OPTBLD_INLINE VMExecutionContext::getHelperPost(
2987 unsigned ndiscard, TypedValue*& tvRet, TypedValue& tvScratch,
2988 Variant& tvRef, Variant& tvRef2) {
2989 // Clean up all ndiscard elements on the stack. Actually discard
2990 // only ndiscard - 1 and overwrite the last cell with the result;
2991 // if ndiscard is zero, we instead need to allocate a cell.
2992 for (unsigned depth = 0; depth < ndiscard; ++depth) {
2993 TypedValue* tv = m_stack.indTV(depth);
2994 tvRefcountedDecRef(tv);
2997 if (!ndiscard) {
2998 tvRet = m_stack.allocTV();
2999 } else {
3000 m_stack.ndiscard(ndiscard - 1);
3001 tvRet = m_stack.topTV();
3004 if (saveResult) {
3005 // If tvRet wasn't just allocated, we've already decref'd it in
3006 // the loop above.
3007 memcpy(tvRet, &tvScratch, sizeof(TypedValue));
3011 #define GETHELPER_ARGS \
3012 pc, ndiscard, tvRet, base, tvScratch, tvLiteral, \
3013 tvRef, tvRef2, mcode, curMember
3014 inline void OPTBLD_INLINE
3015 VMExecutionContext::getHelper(PC& pc,
3016 unsigned& ndiscard,
3017 TypedValue*& tvRet,
3018 TypedValue*& base,
3019 TypedValue& tvScratch,
3020 TypedValue& tvLiteral,
3021 Variant& tvRef,
3022 Variant& tvRef2,
3023 MemberCode& mcode,
3024 TypedValue*& curMember) {
3025 getHelperPre<true, true, VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS);
3026 getHelperPost<true>(GETHELPERPOST_ARGS);
3029 void
3030 VMExecutionContext::getElem(TypedValue* base, TypedValue* key,
3031 TypedValue* dest) {
3032 assert(base->m_type != KindOfArray);
3033 VMRegAnchor _;
3034 tvWriteUninit(dest);
3035 TypedValue* result = Elem<true>(*dest, *dest, base, key);
3036 if (result != dest) {
3037 tvDup(*result, *dest);
3041 template <bool setMember,
3042 bool warn,
3043 bool define,
3044 bool unset,
3045 bool reffy,
3046 unsigned mdepth, // extra args on stack for set (e.g. rhs)
3047 VMExecutionContext::VectorLeaveCode mleave,
3048 bool saveResult>
3049 inline bool OPTBLD_INLINE VMExecutionContext::memberHelperPre(
3050 PC& pc, unsigned& ndiscard, TypedValue*& base,
3051 TypedValue& tvScratch, TypedValue& tvLiteral,
3052 TypedValue& tvRef, TypedValue& tvRef2,
3053 MemberCode& mcode, TypedValue*& curMember) {
3054 // The caller must move pc to the vector immediate before calling
3055 // {get, set}HelperPre.
3056 const ImmVector immVec = ImmVector::createFromStream(pc);
3057 const uint8_t* vec = immVec.vec();
3058 assert(immVec.size() > 0);
3060 // PC needs to be advanced before we do anything; otherwise, if we
3061 // raise a notice in the middle of this, we could resume at the wrong
3062 // instruction.
3063 pc += immVec.size() + sizeof(int32_t) + sizeof(int32_t);
3065 if (!setMember) {
3066 assert(mdepth == 0);
3067 assert(!define);
3068 assert(!unset);
3071 ndiscard = immVec.numStackValues();
3072 int depth = mdepth + ndiscard - 1;
3073 const LocationCode lcode = LocationCode(*vec++);
3075 TypedValue* loc = nullptr;
3076 TypedValue dummy;
3077 Class* const ctx = arGetContextClass(getFP());
3079 StringData* name;
3080 TypedValue* fr = nullptr;
3081 TypedValue* cref;
3082 TypedValue* pname;
3083 tvWriteUninit(&tvScratch);
3085 switch (lcode) {
3086 case LNL:
3087 loc = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3088 goto lcodeName;
3089 case LNC:
3090 loc = m_stack.indTV(depth--);
3091 goto lcodeName;
3093 lcodeName:
3094 if (define) {
3095 lookupd_var(m_fp, name, loc, fr);
3096 } else {
3097 lookup_var(m_fp, name, loc, fr);
3099 if (fr == nullptr) {
3100 if (warn) {
3101 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3103 tvWriteNull(&dummy);
3104 loc = &dummy;
3105 } else {
3106 loc = fr;
3108 decRefStr(name);
3109 break;
3111 case LGL:
3112 loc = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3113 goto lcodeGlobal;
3114 case LGC:
3115 loc = m_stack.indTV(depth--);
3116 goto lcodeGlobal;
3118 lcodeGlobal:
3119 if (define) {
3120 lookupd_gbl(m_fp, name, loc, fr);
3121 } else {
3122 lookup_gbl(m_fp, name, loc, fr);
3124 if (fr == nullptr) {
3125 if (warn) {
3126 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3128 tvWriteNull(&dummy);
3129 loc = &dummy;
3130 } else {
3131 loc = fr;
3133 decRefStr(name);
3134 break;
3136 case LSC:
3137 cref = m_stack.indTV(mdepth);
3138 pname = m_stack.indTV(depth--);
3139 goto lcodeSprop;
3140 case LSL:
3141 cref = m_stack.indTV(mdepth);
3142 pname = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3143 goto lcodeSprop;
3145 lcodeSprop: {
3146 bool visible, accessible;
3147 assert(cref->m_type == KindOfClass);
3148 const Class* class_ = cref->m_data.pcls;
3149 StringData* name = lookup_name(pname);
3150 loc = class_->getSProp(ctx, name, visible, accessible);
3151 if (!(visible && accessible)) {
3152 raise_error("Invalid static property access: %s::%s",
3153 class_->name()->data(),
3154 name->data());
3156 decRefStr(name);
3157 break;
3160 case LL: {
3161 int localInd = decodeVariableSizeImm(&vec);
3162 loc = frame_local_inner(m_fp, localInd);
3163 if (warn) {
3164 if (loc->m_type == KindOfUninit) {
3165 raise_notice(Strings::UNDEFINED_VARIABLE,
3166 m_fp->m_func->localVarName(localInd)->data());
3169 break;
3171 case LC:
3172 case LR:
3173 loc = m_stack.indTV(depth--);
3174 break;
3175 case LH:
3176 assert(m_fp->hasThis());
3177 tvScratch.m_type = KindOfObject;
3178 tvScratch.m_data.pobj = m_fp->getThis();
3179 loc = &tvScratch;
3180 break;
3182 default: not_reached();
3185 base = loc;
3186 tvWriteUninit(&tvLiteral);
3187 tvWriteUninit(&tvRef);
3188 tvWriteUninit(&tvRef2);
3190 // Iterate through the members.
3191 while (vec < pc) {
3192 mcode = MemberCode(*vec++);
3193 if (memberCodeHasImm(mcode)) {
3194 int64_t memberImm = decodeMemberCodeImm(&vec, mcode);
3195 if (memberCodeImmIsString(mcode)) {
3196 tvAsVariant(&tvLiteral) =
3197 m_fp->m_func->unit()->lookupLitstrId(memberImm);
3198 assert(!IS_REFCOUNTED_TYPE(tvLiteral.m_type));
3199 curMember = &tvLiteral;
3200 } else if (mcode == MEI) {
3201 tvAsVariant(&tvLiteral) = memberImm;
3202 curMember = &tvLiteral;
3203 } else {
3204 assert(memberCodeImmIsLoc(mcode));
3205 curMember = frame_local_inner(m_fp, memberImm);
3207 } else {
3208 curMember = (setMember && mcode == MW) ? nullptr : m_stack.indTV(depth--);
3211 if (mleave == VectorLeaveCode::LeaveLast) {
3212 if (vec >= pc) {
3213 assert(vec == pc);
3214 break;
3218 TypedValue* result;
3219 switch (mcode) {
3220 case MEL:
3221 case MEC:
3222 case MET:
3223 case MEI:
3224 if (unset) {
3225 result = ElemU(tvScratch, tvRef, base, curMember);
3226 } else if (define) {
3227 result = ElemD<warn,reffy>(tvScratch, tvRef, base, curMember);
3228 } else {
3229 result = Elem<warn>(tvScratch, tvRef, base, curMember);
3231 break;
3232 case MPL:
3233 case MPC:
3234 case MPT:
3235 result = Prop<warn, define, unset>(tvScratch, tvRef, ctx, base,
3236 curMember);
3237 break;
3238 case MW:
3239 if (setMember) {
3240 assert(define);
3241 result = NewElem(tvScratch, tvRef, base);
3242 } else {
3243 raise_error("Cannot use [] for reading");
3244 result = nullptr;
3246 break;
3247 default:
3248 assert(false);
3249 result = nullptr; // Silence compiler warning.
3251 assert(result != nullptr);
3252 ratchetRefs(result, tvRef, tvRef2);
3253 // Check whether an error occurred (i.e. no result was set).
3254 if (setMember && result == &tvScratch && result->m_type == KindOfUninit) {
3255 return true;
3257 base = result;
3260 if (mleave == VectorLeaveCode::ConsumeAll) {
3261 assert(vec == pc);
3262 if (debug) {
3263 if (lcode == LSC || lcode == LSL) {
3264 assert(depth == int(mdepth));
3265 } else {
3266 assert(depth == int(mdepth) - 1);
3271 if (saveResult) {
3272 assert(!setMember);
3273 // If requested, save a copy of the result. If base already points to
3274 // tvScratch, no reference counting is necessary, because (with the
3275 // exception of the following block), tvScratch is never populated such
3276 // that it owns a reference that must be accounted for.
3277 if (base != &tvScratch) {
3278 // Acquire a reference to the result via tvDup(); base points to the
3279 // result but does not own a reference.
3280 tvDup(*base, tvScratch);
3284 return false;
3287 // The following arguments are outputs: (TODO put them in struct)
3288 // pc: bytecode instruction after the vector instruction
3289 // ndiscard: number of stack elements to discard
3290 // base: ultimate result of the vector-get
3291 // tvScratch: temporary result storage
3292 // tvRef: temporary result storage
3293 // tvRef2: temporary result storage
3294 // mcode: output MemberCode for the last member if LeaveLast
3295 // curMember: output last member value if LeaveLast; undefined
3296 // if the last mcode == MW
3297 template <bool warn,
3298 bool define,
3299 bool unset,
3300 bool reffy,
3301 unsigned mdepth, // extra args on stack for set (e.g. rhs)
3302 VMExecutionContext::VectorLeaveCode mleave>
3303 inline bool OPTBLD_INLINE VMExecutionContext::setHelperPre(
3304 PC& pc, unsigned& ndiscard, TypedValue*& base,
3305 TypedValue& tvScratch, TypedValue& tvLiteral,
3306 TypedValue& tvRef, TypedValue& tvRef2,
3307 MemberCode& mcode, TypedValue*& curMember) {
3308 return memberHelperPre<true, warn, define, unset,
3309 reffy, mdepth, mleave, false>(MEMBERHELPERPRE_OUT);
3312 #define SETHELPERPOST_ARGS ndiscard, tvRef, tvRef2
3313 template <unsigned mdepth>
3314 inline void OPTBLD_INLINE VMExecutionContext::setHelperPost(
3315 unsigned ndiscard, Variant& tvRef, Variant& tvRef2) {
3316 // Clean up the stack. Decref all the elements for the vector, but
3317 // leave the first mdepth (they are not part of the vector data).
3318 for (unsigned depth = mdepth; depth-mdepth < ndiscard; ++depth) {
3319 TypedValue* tv = m_stack.indTV(depth);
3320 tvRefcountedDecRef(tv);
3323 // NOTE: currently the only instructions using this that have return
3324 // values on the stack also have more inputs than the -vector, so
3325 // mdepth > 0. They also always return the original top value of
3326 // the stack.
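// Illustrative case (assumption about the caller): SetM keeps its RHS cell
// above the vector cells, so it uses mdepth == 1; the code below copies that
// cell down so that, once the vector slots are discarded, it remains on top
// of the stack as the instruction's result.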
3327 if (mdepth > 0) {
3328 assert(mdepth == 1 &&
3329 "We don't really support mdepth > 1 in setHelperPost");
3331 if (ndiscard > 0) {
3332 TypedValue* retSrc = m_stack.topTV();
3333 TypedValue* dest = m_stack.indTV(ndiscard + mdepth - 1);
3334 assert(dest != retSrc);
3335 memcpy(dest, retSrc, sizeof *dest);
3339 m_stack.ndiscard(ndiscard);
3342 inline void OPTBLD_INLINE VMExecutionContext::iopLowInvalid(PC& pc) {
3343 fprintf(stderr, "invalid bytecode executed\n");
3344 abort();
3347 inline void OPTBLD_INLINE VMExecutionContext::iopNop(PC& pc) {
3348 NEXT();
3351 inline void OPTBLD_INLINE VMExecutionContext::iopPopC(PC& pc) {
3352 NEXT();
3353 m_stack.popC();
3356 inline void OPTBLD_INLINE VMExecutionContext::iopPopV(PC& pc) {
3357 NEXT();
3358 m_stack.popV();
3361 inline void OPTBLD_INLINE VMExecutionContext::iopPopR(PC& pc) {
3362 NEXT();
3363 if (m_stack.topTV()->m_type != KindOfRef) {
3364 m_stack.popC();
3365 } else {
3366 m_stack.popV();
3370 inline void OPTBLD_INLINE VMExecutionContext::iopDup(PC& pc) {
3371 NEXT();
3372 m_stack.dup();
3375 inline void OPTBLD_INLINE VMExecutionContext::iopBox(PC& pc) {
3376 NEXT();
3377 m_stack.box();
3380 inline void OPTBLD_INLINE VMExecutionContext::iopUnbox(PC& pc) {
3381 NEXT();
3382 m_stack.unbox();
3385 inline void OPTBLD_INLINE VMExecutionContext::iopBoxR(PC& pc) {
3386 NEXT();
3387 TypedValue* tv = m_stack.topTV();
3388 if (tv->m_type != KindOfRef) {
3389 tvBox(tv);
3393 inline void OPTBLD_INLINE VMExecutionContext::iopUnboxR(PC& pc) {
3394 NEXT();
3395 if (m_stack.topTV()->m_type == KindOfRef) {
3396 m_stack.unbox();
3400 inline void OPTBLD_INLINE VMExecutionContext::iopNull(PC& pc) {
3401 NEXT();
3402 m_stack.pushNull();
3405 inline void OPTBLD_INLINE VMExecutionContext::iopNullUninit(PC& pc) {
3406 NEXT();
3407 m_stack.pushNullUninit();
3410 inline void OPTBLD_INLINE VMExecutionContext::iopTrue(PC& pc) {
3411 NEXT();
3412 m_stack.pushTrue();
3415 inline void OPTBLD_INLINE VMExecutionContext::iopFalse(PC& pc) {
3416 NEXT();
3417 m_stack.pushFalse();
3420 inline void OPTBLD_INLINE VMExecutionContext::iopFile(PC& pc) {
3421 NEXT();
3422 const StringData* s = m_fp->m_func->unit()->filepath();
3423 m_stack.pushStaticString(const_cast<StringData*>(s));
3426 inline void OPTBLD_INLINE VMExecutionContext::iopDir(PC& pc) {
3427 NEXT();
3428 const StringData* s = m_fp->m_func->unit()->dirpath();
3429 m_stack.pushStaticString(const_cast<StringData*>(s));
3432 inline void OPTBLD_INLINE VMExecutionContext::iopInt(PC& pc) {
3433 NEXT();
3434 DECODE(int64_t, i);
3435 m_stack.pushInt(i);
3438 inline void OPTBLD_INLINE VMExecutionContext::iopDouble(PC& pc) {
3439 NEXT();
3440 DECODE(double, d);
3441 m_stack.pushDouble(d);
3444 inline void OPTBLD_INLINE VMExecutionContext::iopString(PC& pc) {
3445 NEXT();
3446 DECODE_LITSTR(s);
3447 m_stack.pushStaticString(s);
3450 inline void OPTBLD_INLINE VMExecutionContext::iopArray(PC& pc) {
3451 NEXT();
3452 DECODE(Id, id);
3453 ArrayData* a = m_fp->m_func->unit()->lookupArrayId(id);
3454 m_stack.pushStaticArray(a);
3457 inline void OPTBLD_INLINE VMExecutionContext::iopNewArray(PC& pc) {
3458 NEXT();
3459 // Clever sizing avoids extra work in HphpArray construction.
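// (Assumed reading of the shift: 3 << (MinLgTableSize - 2) is three quarters
// of the minimum table size, i.e. the largest capacity request that still
// fits in the smallest HphpArray without forcing a grow.)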
3460 auto arr = ArrayData::Make(size_t(3U) << (HphpArray::MinLgTableSize-2));
3461 m_stack.pushArray(arr);
3464 inline void OPTBLD_INLINE VMExecutionContext::iopNewTuple(PC& pc) {
3465 NEXT();
3466 DECODE_IVA(n);
3467 // This constructor moves values; no inc/decref is necessary.
3468 HphpArray* arr = ArrayData::Make(n, m_stack.topC());
3469 m_stack.ndiscard(n);
3470 m_stack.pushArray(arr);
3473 inline void OPTBLD_INLINE VMExecutionContext::iopAddElemC(PC& pc) {
3474 NEXT();
3475 Cell* c1 = m_stack.topC();
3476 Cell* c2 = m_stack.indC(1);
3477 Cell* c3 = m_stack.indC(2);
3478 if (c3->m_type != KindOfArray) {
3479 raise_error("AddElemC: $3 must be an array");
3481 if (c2->m_type == KindOfInt64) {
3482 tvCellAsVariant(c3).asArrRef().set(c2->m_data.num, tvAsCVarRef(c1));
3483 } else {
3484 tvCellAsVariant(c3).asArrRef().set(tvAsCVarRef(c2), tvAsCVarRef(c1));
3486 m_stack.popC();
3487 m_stack.popC();
3490 inline void OPTBLD_INLINE VMExecutionContext::iopAddElemV(PC& pc) {
3491 NEXT();
3492 Var* v1 = m_stack.topV();
3493 Cell* c2 = m_stack.indC(1);
3494 Cell* c3 = m_stack.indC(2);
3495 if (c3->m_type != KindOfArray) {
3496 raise_error("AddElemV: $3 must be an array");
3498 if (c2->m_type == KindOfInt64) {
3499 tvCellAsVariant(c3).asArrRef().set(c2->m_data.num, ref(tvAsCVarRef(v1)));
3500 } else {
3501 tvCellAsVariant(c3).asArrRef().set(tvAsCVarRef(c2), ref(tvAsCVarRef(v1)));
3503 m_stack.popV();
3504 m_stack.popC();
3507 inline void OPTBLD_INLINE VMExecutionContext::iopAddNewElemC(PC& pc) {
3508 NEXT();
3509 Cell* c1 = m_stack.topC();
3510 Cell* c2 = m_stack.indC(1);
3511 if (c2->m_type != KindOfArray) {
3512 raise_error("AddNewElemC: $2 must be an array");
3514 tvCellAsVariant(c2).asArrRef().append(tvAsCVarRef(c1));
3515 m_stack.popC();
3518 inline void OPTBLD_INLINE VMExecutionContext::iopAddNewElemV(PC& pc) {
3519 NEXT();
3520 Var* v1 = m_stack.topV();
3521 Cell* c2 = m_stack.indC(1);
3522 if (c2->m_type != KindOfArray) {
3523 raise_error("AddNewElemV: $2 must be an array");
3525 tvCellAsVariant(c2).asArrRef().append(ref(tvAsCVarRef(v1)));
3526 m_stack.popV();
3529 inline void OPTBLD_INLINE VMExecutionContext::iopNewCol(PC& pc) {
3530 NEXT();
3531 DECODE_IVA(cType);
3532 DECODE_IVA(nElms);
3533 ObjectData* obj;
3534 switch (cType) {
3535 case Collection::VectorType: obj = NEWOBJ(c_Vector)(); break;
3536 case Collection::MapType: obj = NEWOBJ(c_Map)(); break;
3537 case Collection::StableMapType: obj = NEWOBJ(c_StableMap)(); break;
3538 case Collection::SetType: obj = NEWOBJ(c_Set)(); break;
3539 case Collection::PairType: obj = NEWOBJ(c_Pair)(); break;
3540 default:
3541 obj = nullptr;
3542 raise_error("NewCol: Invalid collection type");
3543 break;
3545 // Reserve enough room for nElms elements in advance
3546 if (nElms) {
3547 collectionReserve(obj, nElms);
3549 m_stack.pushObject(obj);
3552 inline void OPTBLD_INLINE VMExecutionContext::iopColAddNewElemC(PC& pc) {
3553 NEXT();
3554 Cell* c1 = m_stack.topC();
3555 Cell* c2 = m_stack.indC(1);
3556 if (c2->m_type == KindOfObject && c2->m_data.pobj->isCollection()) {
3557 collectionAppend(c2->m_data.pobj, c1);
3558 } else {
3559 raise_error("ColAddNewElemC: $2 must be a collection");
3561 m_stack.popC();
3564 inline void OPTBLD_INLINE VMExecutionContext::iopColAddElemC(PC& pc) {
3565 NEXT();
3566 Cell* c1 = m_stack.topC();
3567 Cell* c2 = m_stack.indC(1);
3568 Cell* c3 = m_stack.indC(2);
3569 if (c3->m_type == KindOfObject && c3->m_data.pobj->isCollection()) {
3570 collectionSet(c3->m_data.pobj, c2, c1);
3571 } else {
3572 raise_error("ColAddElemC: $3 must be a collection");
3574 m_stack.popC();
3575 m_stack.popC();
3578 inline void OPTBLD_INLINE VMExecutionContext::iopCns(PC& pc) {
3579 NEXT();
3580 DECODE_LITSTR(s);
3581 TypedValue* cns = Unit::loadCns(s);
3582 if (cns == nullptr) {
3583 raise_notice(Strings::UNDEFINED_CONSTANT, s->data(), s->data());
3584 m_stack.pushStaticString(s);
3585 return;
3587 Cell* c1 = m_stack.allocC();
3588 tvReadCell(cns, c1);
3591 inline void OPTBLD_INLINE VMExecutionContext::iopCnsE(PC& pc) {
3592 NEXT();
3593 DECODE_LITSTR(s);
3594 TypedValue* cns = Unit::loadCns(s);
3595 if (cns == nullptr) {
3596 raise_error("Undefined constant '%s'", s->data());
3598 Cell* c1 = m_stack.allocC();
3599 tvReadCell(cns, c1);
3602 inline void OPTBLD_INLINE VMExecutionContext::iopCnsU(PC& pc) {
3603 NEXT();
3604 DECODE_LITSTR(name);
3605 DECODE_LITSTR(fallback);
3606 TypedValue* cns = Unit::loadCns(name);
3607 if (cns == nullptr) {
3608 cns = Unit::loadCns(fallback);
3609 if (cns == nullptr) {
3610 raise_notice(
3611 Strings::UNDEFINED_CONSTANT,
3612 fallback->data(),
3613 fallback->data()
3615 m_stack.pushStaticString(fallback);
3616 return;
3619 Cell* c1 = m_stack.allocC();
3620 tvReadCell(cns, c1);
3623 inline void OPTBLD_INLINE VMExecutionContext::iopDefCns(PC& pc) {
3624 NEXT();
3625 DECODE_LITSTR(s);
3626 TypedValue* tv = m_stack.topTV();
3627 tvAsVariant(tv) = Unit::defCns(s, tv);
3630 inline void OPTBLD_INLINE VMExecutionContext::iopClsCns(PC& pc) {
3631 NEXT();
3632 DECODE_LITSTR(clsCnsName);
3633 TypedValue* tv = m_stack.topTV();
3634 assert(tv->m_type == KindOfClass);
3635 Class* class_ = tv->m_data.pcls;
3636 assert(class_ != nullptr);
3637 TypedValue* clsCns = class_->clsCnsGet(clsCnsName);
3638 if (clsCns == nullptr) {
3639 raise_error("Couldn't find constant %s::%s",
3640 class_->name()->data(), clsCnsName->data());
3642 tvReadCell(clsCns, tv);
3645 inline void OPTBLD_INLINE VMExecutionContext::iopClsCnsD(PC& pc) {
3646 NEXT();
3647 DECODE_LITSTR(clsCnsName);
3648 DECODE(Id, classId);
3649 const NamedEntityPair& classNamedEntity =
3650 m_fp->m_func->unit()->lookupNamedEntityPairId(classId);
3652 TypedValue* clsCns = lookupClsCns(classNamedEntity.second,
3653 classNamedEntity.first, clsCnsName);
3654 assert(clsCns != nullptr);
3655 Cell* c1 = m_stack.allocC();
3656 tvReadCell(clsCns, c1);
3659 inline void OPTBLD_INLINE VMExecutionContext::iopConcat(PC& pc) {
3660 NEXT();
3661 Cell* c1 = m_stack.topC();
3662 Cell* c2 = m_stack.indC(1);
3663 if (IS_STRING_TYPE(c1->m_type) && IS_STRING_TYPE(c2->m_type)) {
3664 tvCellAsVariant(c2) = concat(
3665 tvCellAsVariant(c2).toString(), tvCellAsCVarRef(c1).toString());
3666 } else {
3667 tvCellAsVariant(c2) = concat(tvCellAsVariant(c2).toString(),
3668 tvCellAsCVarRef(c1).toString());
3670 assert(c2->m_data.pstr->getCount() > 0);
3671 m_stack.popC();
3674 inline void OPTBLD_INLINE VMExecutionContext::iopNot(PC& pc) {
3675 NEXT();
3676 Cell* c1 = m_stack.topC();
3677 tvCellAsVariant(c1) = !tvCellAsVariant(c1).toBoolean();
3680 template<class Op>
3681 void OPTBLD_INLINE VMExecutionContext::implCellBinOp(PC& pc, Op op) {
3682 NEXT();
3683 auto const c1 = m_stack.topC();
3684 auto const c2 = m_stack.indC(1);
3685 auto const result = op(*c2, *c1);
3686 tvRefcountedDecRefCell(c2);
3687 *c2 = result;
3688 m_stack.popC();
3691 inline void OPTBLD_INLINE VMExecutionContext::iopAdd(PC& pc) {
3692 implCellBinOp(pc, cellAdd);
3695 inline void OPTBLD_INLINE VMExecutionContext::iopSub(PC& pc) {
3696 implCellBinOp(pc, cellSub);
3699 inline void OPTBLD_INLINE VMExecutionContext::iopMul(PC& pc) {
3700 implCellBinOp(pc, cellMul);
3703 inline void OPTBLD_INLINE VMExecutionContext::iopDiv(PC& pc) {
3704 implCellBinOp(pc, cellDiv);
3707 inline void OPTBLD_INLINE VMExecutionContext::iopMod(PC& pc) {
3708 implCellBinOp(pc, cellMod);
3711 template<class Op>
3712 void OPTBLD_INLINE VMExecutionContext::implCellBinOpBool(PC& pc, Op op) {
3713 NEXT();
3714 auto const c1 = m_stack.topC();
3715 auto const c2 = m_stack.indC(1);
3716 bool const result = op(*c2, *c1);
3717 tvRefcountedDecRefCell(c2);
3718 *c2 = make_tv<KindOfBoolean>(result);
3719 m_stack.popC();
3722 inline void OPTBLD_INLINE VMExecutionContext::iopXor(PC& pc) {
3723 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) -> bool {
3724 return cellToBool(c1) ^ cellToBool(c2);
3728 inline void OPTBLD_INLINE VMExecutionContext::iopSame(PC& pc) {
3729 implCellBinOpBool(pc, cellSame);
3732 inline void OPTBLD_INLINE VMExecutionContext::iopNSame(PC& pc) {
3733 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) {
3734 return !cellSame(c1, c2);
3738 inline void OPTBLD_INLINE VMExecutionContext::iopEq(PC& pc) {
3739 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) {
3740 return cellEqual(c1, c2);
3744 inline void OPTBLD_INLINE VMExecutionContext::iopNeq(PC& pc) {
3745 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) {
3746 return !cellEqual(c1, c2);
3750 inline void OPTBLD_INLINE VMExecutionContext::iopLt(PC& pc) {
3751 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) {
3752 return cellLess(c1, c2);
3756 inline void OPTBLD_INLINE VMExecutionContext::iopLte(PC& pc) {
3757 implCellBinOpBool(pc, cellLessOrEqual);
3760 inline void OPTBLD_INLINE VMExecutionContext::iopGt(PC& pc) {
3761 implCellBinOpBool(pc, [&] (Cell c1, Cell c2) {
3762 return cellGreater(c1, c2);
3766 inline void OPTBLD_INLINE VMExecutionContext::iopGte(PC& pc) {
3767 implCellBinOpBool(pc, cellGreaterOrEqual);
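// MATHOP expands to the common int64 fast path for a binary operator; the
// MATHOP_DOUBLE and MATHOP_DIVCHECK hooks let a caller splice in a double
// fast path or a divide-by-zero check. The bitwise ops below define both
// hooks as empty, so non-int operands fall through to the Variant helper.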
3770 #define MATHOP(OP, VOP) do { \
3771 NEXT(); \
3772 Cell* c1 = m_stack.topC(); \
3773 Cell* c2 = m_stack.indC(1); \
3774 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
3775 int64_t a = c2->m_data.num; \
3776 int64_t b = c1->m_data.num; \
3777 MATHOP_DIVCHECK(0) \
3778 c2->m_data.num = a OP b; \
3779 m_stack.popX(); \
3780 } \
3781 MATHOP_DOUBLE(OP) \
3782 else { \
3783 tvCellAsVariant(c2) = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
3784 m_stack.popC(); \
3785 } \
3786 } while (0)
3788 #define MATHOP_DOUBLE(OP)
3789 #define MATHOP_DIVCHECK(x)
3790 inline void OPTBLD_INLINE VMExecutionContext::iopBitAnd(PC& pc) {
3791 MATHOP(&, bitwise_and);
3794 inline void OPTBLD_INLINE VMExecutionContext::iopBitOr(PC& pc) {
3795 MATHOP(|, bitwise_or);
3798 inline void OPTBLD_INLINE VMExecutionContext::iopBitXor(PC& pc) {
3799 MATHOP(^, bitwise_xor);
3801 #undef MATHOP
3802 #undef MATHOP_DOUBLE
3803 #undef MATHOP_DIVCHECK
3805 inline void OPTBLD_INLINE VMExecutionContext::iopBitNot(PC& pc) {
3806 NEXT();
3807 Cell* c1 = m_stack.topC();
3808 if (LIKELY(c1->m_type == KindOfInt64)) {
3809 c1->m_data.num = ~c1->m_data.num;
3810 } else if (c1->m_type == KindOfDouble) {
3811 c1->m_type = KindOfInt64;
3812 c1->m_data.num = ~int64_t(c1->m_data.dbl);
3813 } else if (IS_STRING_TYPE(c1->m_type)) {
3814 tvCellAsVariant(c1) = tvCellAsVariant(c1).bitNot();
3815 } else {
3816 raise_error("Unsupported operand type for ~");
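// SHIFTOP: int64 fast path for << and >>; any other operand types are
// coerced to int64 via the Variant conversions.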
3820 #define SHIFTOP(OP) do { \
3821 NEXT(); \
3822 Cell* c1 = m_stack.topC(); \
3823 Cell* c2 = m_stack.indC(1); \
3824 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
3825 int64_t a = c2->m_data.num; \
3826 int64_t b = c1->m_data.num; \
3827 c2->m_data.num = a OP b; \
3828 m_stack.popX(); \
3829 } else { \
3830 tvCellAsVariant(c2) = tvCellAsVariant(c2).toInt64() OP \
3831 tvCellAsCVarRef(c1).toInt64(); \
3832 m_stack.popC(); \
3833 } \
3834 } while (0)
3835 inline void OPTBLD_INLINE VMExecutionContext::iopShl(PC& pc) {
3836 SHIFTOP(<<);
3839 inline void OPTBLD_INLINE VMExecutionContext::iopShr(PC& pc) {
3840 SHIFTOP(>>);
3842 #undef SHIFTOP
3844 inline void OPTBLD_INLINE VMExecutionContext::iopCastBool(PC& pc) {
3845 NEXT();
3846 Cell* c1 = m_stack.topC();
3847 tvCastToBooleanInPlace(c1);
3850 inline void OPTBLD_INLINE VMExecutionContext::iopCastInt(PC& pc) {
3851 NEXT();
3852 Cell* c1 = m_stack.topC();
3853 tvCastToInt64InPlace(c1);
3856 inline void OPTBLD_INLINE VMExecutionContext::iopCastDouble(PC& pc) {
3857 NEXT();
3858 Cell* c1 = m_stack.topC();
3859 tvCastToDoubleInPlace(c1);
3862 inline void OPTBLD_INLINE VMExecutionContext::iopCastString(PC& pc) {
3863 NEXT();
3864 Cell* c1 = m_stack.topC();
3865 tvCastToStringInPlace(c1);
3868 inline void OPTBLD_INLINE VMExecutionContext::iopCastArray(PC& pc) {
3869 NEXT();
3870 Cell* c1 = m_stack.topC();
3871 tvCastToArrayInPlace(c1);
3874 inline void OPTBLD_INLINE VMExecutionContext::iopCastObject(PC& pc) {
3875 NEXT();
3876 Cell* c1 = m_stack.topC();
3877 tvCastToObjectInPlace(c1);
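// Shared instanceof check for a cell: objects are tested against the
// resolved class, and arrays satisfy the check only for interfaces that
// support arrays (interface_supports_array).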
3880 inline bool OPTBLD_INLINE VMExecutionContext::cellInstanceOf(
3881 TypedValue* tv, const NamedEntity* ne) {
3882 assert(tv->m_type != KindOfRef);
3883 if (tv->m_type == KindOfObject) {
3884 Class* cls = Unit::lookupClass(ne);
3885 if (cls) return tv->m_data.pobj->instanceof(cls);
3886 } else if (tv->m_type == KindOfArray) {
3887 Class* cls = Unit::lookupClass(ne);
3888 if (cls && interface_supports_array(cls->name())) {
3889 return true;
3892 return false;
3895 inline void OPTBLD_INLINE VMExecutionContext::iopInstanceOf(PC& pc) {
3896 NEXT();
3897 Cell* c1 = m_stack.topC(); // c2 instanceof c1
3898 Cell* c2 = m_stack.indC(1);
3899 bool r = false;
3900 if (IS_STRING_TYPE(c1->m_type)) {
3901 const NamedEntity* rhs = Unit::GetNamedEntity(c1->m_data.pstr);
3902 r = cellInstanceOf(c2, rhs);
3903 } else if (c1->m_type == KindOfObject) {
3904 if (c2->m_type == KindOfObject) {
3905 ObjectData* lhs = c2->m_data.pobj;
3906 ObjectData* rhs = c1->m_data.pobj;
3907 r = lhs->instanceof(rhs->getVMClass());
3909 } else {
3910 raise_error("Class name must be a valid object or a string");
3912 m_stack.popC();
3913 tvRefcountedDecRefCell(c2);
3914 c2->m_data.num = r;
3915 c2->m_type = KindOfBoolean;
3918 inline void OPTBLD_INLINE VMExecutionContext::iopInstanceOfD(PC& pc) {
3919 NEXT();
3920 DECODE(Id, id);
3921 if (shouldProfile()) {
3922 Class::profileInstanceOf(m_fp->m_func->unit()->lookupLitstrId(id));
3924 const NamedEntity* ne = m_fp->m_func->unit()->lookupNamedEntityId(id);
3925 Cell* c1 = m_stack.topC();
3926 bool r = cellInstanceOf(c1, ne);
3927 tvRefcountedDecRefCell(c1);
3928 c1->m_data.num = r;
3929 c1->m_type = KindOfBoolean;
3932 inline void OPTBLD_INLINE VMExecutionContext::iopPrint(PC& pc) {
3933 NEXT();
3934 Cell* c1 = m_stack.topC();
3935 echo(tvCellAsVariant(c1).toString());
3936 tvRefcountedDecRefCell(c1);
3937 c1->m_type = KindOfInt64;
3938 c1->m_data.num = 1;
3941 inline void OPTBLD_INLINE VMExecutionContext::iopClone(PC& pc) {
3942 NEXT();
3943 TypedValue* tv = m_stack.topTV();
3944 if (tv->m_type != KindOfObject) {
3945 raise_error("clone called on non-object");
3947 ObjectData* obj = tv->m_data.pobj;
3948 const Class* class_ UNUSED = obj->getVMClass();
3949 ObjectData* newobj = obj->clone();
3950 m_stack.popTV();
3951 m_stack.pushNull();
3952 tv->m_type = KindOfObject;
3953 tv->m_data.pobj = newobj;
3956 inline void OPTBLD_INLINE VMExecutionContext::iopExit(PC& pc) {
3957 NEXT();
3958 int exitCode = 0;
3959 Cell* c1 = m_stack.topC();
3960 if (c1->m_type == KindOfInt64) {
3961 exitCode = c1->m_data.num;
3962 } else {
3963 echo(tvCellAsVariant(c1).toString());
3965 m_stack.popC();
3966 m_stack.pushNull();
3967 throw ExitException(exitCode);
3970 inline void OPTBLD_INLINE VMExecutionContext::iopFatal(PC& pc) {
3971 NEXT();
3972 TypedValue* top = m_stack.topTV();
3973 std::string msg;
3974 DECODE_IVA(skipFrame);
3975 if (IS_STRING_TYPE(top->m_type)) {
3976 msg = top->m_data.pstr->data();
3977 } else {
3978 msg = "Fatal error message not a string";
3980 m_stack.popTV();
3981 if (skipFrame) {
3982 raise_error_without_first_frame(msg);
3983 } else {
3984 raise_error(msg);
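// Only backward jumps (negative offsets) poll the surprise flags, so long
// running loops can be interrupted without penalizing forward control flow.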
3988 inline void OPTBLD_INLINE VMExecutionContext::jmpSurpriseCheck(Offset offset) {
3989 if (offset < 0 && UNLIKELY(Transl::TargetCache::loadConditionFlags())) {
3990 EventHook::CheckSurprise();
3994 inline void OPTBLD_INLINE VMExecutionContext::iopJmp(PC& pc) {
3995 NEXT();
3996 DECODE_JMP(Offset, offset);
3997 jmpSurpriseCheck(offset);
3999 pc += offset - 1;
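// JmpZ and JmpNZ share this implementation: int/bool payloads are tested
// directly, anything else is converted to bool first.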
4002 template<Op op>
4003 inline void OPTBLD_INLINE VMExecutionContext::jmpOpImpl(PC& pc) {
4004 static_assert(op == OpJmpZ || op == OpJmpNZ,
4005 "jmpOpImpl should only be used by JmpZ and JmpNZ");
4006 NEXT();
4007 DECODE_JMP(Offset, offset);
4008 jmpSurpriseCheck(offset);
4010 Cell* c1 = m_stack.topC();
4011 if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
4012 int64_t n = c1->m_data.num;
4013 if (op == OpJmpZ ? n == 0 : n != 0) {
4014 pc += offset - 1;
4015 m_stack.popX();
4016 } else {
4017 pc += sizeof(Offset);
4018 m_stack.popX();
4020 } else {
4021 auto const condition = toBoolean(tvCellAsCVarRef(c1));
4022 if (op == OpJmpZ ? !condition : condition) {
4023 pc += offset - 1;
4024 m_stack.popC();
4025 } else {
4026 pc += sizeof(Offset);
4027 m_stack.popC();
4032 inline void OPTBLD_INLINE VMExecutionContext::iopJmpZ(PC& pc) {
4033 jmpOpImpl<OpJmpZ>(pc);
4036 inline void OPTBLD_INLINE VMExecutionContext::iopJmpNZ(PC& pc) {
4037 jmpOpImpl<OpJmpNZ>(pc);
4040 #define FREE_ITER_LIST(typeList, idList, veclen) do { \
4041 int iterIndex; \
4042 for (iterIndex = 0; iterIndex < 2 * veclen; iterIndex += 2) { \
4043 Id iterType = typeList[iterIndex]; \
4044 Id iterId = idList[iterIndex]; \
4046 Iter *iter = frame_iter(m_fp, iterId); \
4048 switch (iterType) { \
4049 case KindOfIter: iter->free(); break; \
4050 case KindOfMIter: iter->mfree(); break; \
4051 case KindOfCIter: iter->cfree(); break; \
4052 } \
4053 } \
4054 } while(0)
4056 inline void OPTBLD_INLINE VMExecutionContext::iopIterBreak(PC& pc) {
4057 PC savedPc = pc;
4058 NEXT();
4059 DECODE_ITER_LIST(iterTypeList, iterIdList, veclen);
4060 DECODE_JMP(Offset, offset);
4062 jmpSurpriseCheck(offset); // we do this early so iterators are still dirty if
4063 // we have an exception
4065 FREE_ITER_LIST(iterTypeList, iterIdList, veclen);
4066 pc = savedPc + offset;
4069 #undef FREE_ITER_LIST
4071 enum class SwitchMatch {
4072 NORMAL, // value was converted to an int: match normally
4073 NONZERO, // can't be converted to an int: match first nonzero case
4074 DEFAULT, // can't be converted to an int: match default case
4077 static SwitchMatch doubleCheck(double d, int64_t& out) {
4078 if (int64_t(d) == d) {
4079 out = d;
4080 return SwitchMatch::NORMAL;
4081 } else {
4082 return SwitchMatch::DEFAULT;
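// The Switch jump table has veclen targets. For a bounded switch the last
// two entries are special: jmptab[veclen - 2] handles the "first nonzero"
// case and jmptab[veclen - 1] is the default case.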
4086 inline void OPTBLD_INLINE VMExecutionContext::iopSwitch(PC& pc) {
4087 PC origPC = pc;
4088 NEXT();
4089 DECODE(int32_t, veclen);
4090 assert(veclen > 0);
4091 Offset* jmptab = (Offset*)pc;
4092 pc += veclen * sizeof(*jmptab);
4093 DECODE(int64_t, base);
4094 DECODE_IVA(bounded);
4096 TypedValue* val = m_stack.topTV();
4097 if (!bounded) {
4098 assert(val->m_type == KindOfInt64);
4099 // Continuation switch: no bounds checking needed
4100 int64_t label = val->m_data.num;
4101 m_stack.popX();
4102 assert(label >= 0 && label < veclen);
4103 pc = origPC + jmptab[label];
4104 } else {
4105 // Generic integer switch
4106 int64_t intval;
4107 SwitchMatch match = SwitchMatch::NORMAL;
4109 switch (val->m_type) {
4110 case KindOfUninit:
4111 case KindOfNull:
4112 intval = 0;
4113 break;
4115 case KindOfBoolean:
4116 // bool(true) is equal to any non-zero int, bool(false) == 0
4117 if (val->m_data.num) {
4118 match = SwitchMatch::NONZERO;
4119 } else {
4120 intval = 0;
4122 break;
4124 case KindOfInt64:
4125 intval = val->m_data.num;
4126 break;
4128 case KindOfDouble:
4129 match = doubleCheck(val->m_data.dbl, intval);
4130 break;
4132 case KindOfStaticString:
4133 case KindOfString: {
4134 double dval = 0.0;
4135 DataType t = val->m_data.pstr->isNumericWithVal(intval, dval, 1);
4136 switch (t) {
4137 case KindOfNull:
4138 intval = 0;
4139 break;
4141 case KindOfDouble:
4142 match = doubleCheck(dval, intval);
4143 break;
4145 case KindOfInt64:
4146 // do nothing
4147 break;
4149 default:
4150 not_reached();
4152 tvRefcountedDecRef(val);
4153 break;
4156 case KindOfArray:
4157 match = SwitchMatch::DEFAULT;
4158 tvDecRef(val);
4159 break;
4161 case KindOfObject:
4162 intval = val->m_data.pobj->o_toInt64();
4163 tvDecRef(val);
4164 break;
4166 default:
4167 not_reached();
4169 m_stack.discard();
4171 if (match != SwitchMatch::NORMAL ||
4172 intval < base || intval >= (base + veclen - 2)) {
4173 switch (match) {
4174 case SwitchMatch::NORMAL:
4175 case SwitchMatch::DEFAULT:
4176 pc = origPC + jmptab[veclen - 1];
4177 break;
4179 case SwitchMatch::NONZERO:
4180 pc = origPC + jmptab[veclen - 2];
4181 break;
4183 } else {
4184 pc = origPC + jmptab[intval - base];
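// SSwitch compares the switch value against each case's literal string in
// order; the final jump-table entry is the unconditional default.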
4189 inline void OPTBLD_INLINE VMExecutionContext::iopSSwitch(PC& pc) {
4190 PC origPC = pc;
4191 NEXT();
4192 DECODE(int32_t, veclen);
4193 assert(veclen > 1);
4194 unsigned cases = veclen - 1; // the last vector item is the default case
4195 StrVecItem* jmptab = (StrVecItem*)pc;
4196 pc += veclen * sizeof(*jmptab);
4198 Cell* val = tvToCell(m_stack.topTV());
4199 Unit* u = m_fp->m_func->unit();
4200 unsigned i;
4201 for (i = 0; i < cases; ++i) {
4202 auto& item = jmptab[i];
4203 const StringData* str = u->lookupLitstrId(item.str);
4204 if (cellEqual(*val, str)) {
4205 pc = origPC + item.dest;
4206 break;
4209 if (i == cases) {
4210 // default case
4211 pc = origPC + jmptab[veclen-1].dest;
4213 m_stack.popC();
4216 inline void OPTBLD_INLINE VMExecutionContext::iopRetC(PC& pc) {
4217 NEXT();
4218 uint soff = m_fp->m_soff;
4219 assert(!m_fp->m_func->isGenerator());
4221 // Call the runtime helpers to free the local variables and iterators
4222 frame_free_locals_inl(m_fp, m_fp->m_func->numLocals());
4223 ActRec* sfp = m_fp->arGetSfp();
4224 // Memcpy the return value on top of the activation record. This works
4225 // the same regardless of whether the return value is boxed or not.
4226 TypedValue* retval_ptr = &m_fp->m_r;
4227 memcpy(retval_ptr, m_stack.topTV(), sizeof(TypedValue));
4228 // Adjust the stack
4229 m_stack.ndiscard(m_fp->m_func->numSlotsInFrame() + 1);
4231 if (LIKELY(sfp != m_fp)) {
4232 // Restore caller's execution state.
4233 m_fp = sfp;
4234 pc = m_fp->m_func->unit()->entry() + m_fp->m_func->base() + soff;
4235 m_stack.ret();
4236 assert(m_stack.topTV() == retval_ptr);
4237 } else {
4238 // No caller; terminate.
4239 m_stack.ret();
4240 #ifdef HPHP_TRACE
4241 {
4242 std::ostringstream os;
4243 os << toStringElm(m_stack.topTV());
4244 ONTRACE(1,
4245 Trace::trace("Return %s from VMExecutionContext::dispatch("
4246 "%p)\n", os.str().c_str(), m_fp));
4247 }
4248 #endif
4249 pc = 0;
4253 inline void OPTBLD_INLINE VMExecutionContext::iopRetV(PC& pc) {
4254 iopRetC(pc);
4257 inline void OPTBLD_INLINE VMExecutionContext::iopUnwind(PC& pc) {
4258 assert(!m_faults.empty());
4259 assert(m_faults.back().m_savedRaiseOffset != kInvalidOffset);
4260 throw VMPrepareUnwind();
4263 inline void OPTBLD_INLINE VMExecutionContext::iopThrow(PC& pc) {
4264 Cell* c1 = m_stack.topC();
4265 if (c1->m_type != KindOfObject ||
4266 !c1->m_data.pobj->instanceof(SystemLib::s_ExceptionClass)) {
4267 raise_error("Exceptions must be valid objects derived from the "
4268 "Exception base class");
4271 Object obj(c1->m_data.pobj);
4272 m_stack.popC();
4273 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
4274 throw obj;
4277 inline void OPTBLD_INLINE VMExecutionContext::iopAGetC(PC& pc) {
4278 NEXT();
4279 TypedValue* tv = m_stack.topTV();
4280 lookupClsRef(tv, tv, true);
4283 inline void OPTBLD_INLINE VMExecutionContext::iopAGetL(PC& pc) {
4284 NEXT();
4285 DECODE_HA(local);
4286 TypedValue* top = m_stack.allocTV();
4287 TypedValue* fr = frame_local_inner(m_fp, local);
4288 lookupClsRef(fr, top);
4291 static void raise_undefined_local(ActRec* fp, Id pind) {
4292 assert(pind < fp->m_func->numNamedLocals());
4293 raise_notice(Strings::UNDEFINED_VARIABLE,
4294 fp->m_func->localVarName(pind)->data());
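// cgetl_body writes null into the destination and raises an
// undefined-variable notice when the local is uninitialized; otherwise it
// duplicates the local's value, unboxing refs.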
4297 static inline void cgetl_inner_body(TypedValue* fr, TypedValue* to) {
4298 assert(fr->m_type != KindOfUninit);
4299 tvDup(*fr, *to);
4300 if (to->m_type == KindOfRef) {
4301 tvUnbox(to);
4305 static inline void cgetl_body(ActRec* fp,
4306 TypedValue* fr,
4307 TypedValue* to,
4308 Id pind) {
4309 if (fr->m_type == KindOfUninit) {
4310 // `to' is uninitialized here, so we need to tvWriteNull before
4311 // possibly causing stack unwinding.
4312 tvWriteNull(to);
4313 raise_undefined_local(fp, pind);
4314 } else {
4315 cgetl_inner_body(fr, to);
4319 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL(PC& pc) {
4320 NEXT();
4321 DECODE_HA(local);
4322 Cell* to = m_stack.allocC();
4323 TypedValue* fr = frame_local(m_fp, local);
4324 cgetl_body(m_fp, fr, to, local);
4327 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL2(PC& pc) {
4328 NEXT();
4329 DECODE_HA(local);
4330 TypedValue* oldTop = m_stack.topTV();
4331 TypedValue* newTop = m_stack.allocTV();
4332 memcpy(newTop, oldTop, sizeof *newTop);
4333 Cell* to = oldTop;
4334 TypedValue* fr = frame_local(m_fp, local);
4335 cgetl_body(m_fp, fr, to, local);
4338 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL3(PC& pc) {
4339 NEXT();
4340 DECODE_HA(local);
4341 TypedValue* oldTop = m_stack.topTV();
4342 TypedValue* oldSubTop = m_stack.indTV(1);
4343 TypedValue* newTop = m_stack.allocTV();
4344 memmove(newTop, oldTop, sizeof *oldTop * 2);
4345 Cell* to = oldSubTop;
4346 TypedValue* fr = frame_local(m_fp, local);
4347 cgetl_body(m_fp, fr, to, local);
4350 inline void OPTBLD_INLINE VMExecutionContext::iopCGetN(PC& pc) {
4351 NEXT();
4352 StringData* name;
4353 TypedValue* to = m_stack.topTV();
4354 TypedValue* fr = nullptr;
4355 lookup_var(m_fp, name, to, fr);
4356 if (fr == nullptr || fr->m_type == KindOfUninit) {
4357 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4358 tvRefcountedDecRefCell(to);
4359 tvWriteNull(to);
4360 } else {
4361 tvRefcountedDecRefCell(to);
4362 cgetl_inner_body(fr, to);
4364 decRefStr(name); // TODO(#1146727): leaks during exceptions
4367 inline void OPTBLD_INLINE VMExecutionContext::iopCGetG(PC& pc) {
4368 NEXT();
4369 StringData* name;
4370 TypedValue* to = m_stack.topTV();
4371 TypedValue* fr = nullptr;
4372 lookup_gbl(m_fp, name, to, fr);
4373 if (fr == nullptr) {
4374 if (MoreWarnings) {
4375 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4377 tvRefcountedDecRefCell(to);
4378 tvWriteNull(to);
4379 } else if (fr->m_type == KindOfUninit) {
4380 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4381 tvRefcountedDecRefCell(to);
4382 tvWriteNull(to);
4383 } else {
4384 tvRefcountedDecRefCell(to);
4385 cgetl_inner_body(fr, to);
4387 decRefStr(name); // TODO(#1146727): leaks during exceptions
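// SPROP_OP_PRELUDE expects a class ref on top of the stack and the property
// name cell below it; it resolves the static property into `val` and reports
// visibility/accessibility for the caller to check. The enclosing opcode
// supplies the `name` StringData*.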
4390 #define SPROP_OP_PRELUDE \
4391 NEXT(); \
4392 TypedValue* clsref = m_stack.topTV(); \
4393 TypedValue* nameCell = m_stack.indTV(1); \
4394 TypedValue* output = nameCell; \
4395 TypedValue* val; \
4396 bool visible, accessible; \
4397 lookup_sprop(m_fp, clsref, name, nameCell, val, visible, \
4398 accessible);
4400 #define SPROP_OP_POSTLUDE \
4401 decRefStr(name);
4403 #define GETS(box) do { \
4404 SPROP_OP_PRELUDE \
4405 if (!(visible && accessible)) { \
4406 raise_error("Invalid static property access: %s::%s", \
4407 clsref->m_data.pcls->name()->data(), \
4408 name->data()); \
4410 if (box) { \
4411 if (val->m_type != KindOfRef) { \
4412 tvBox(val); \
4414 varDup(*val, *output); \
4415 } else { \
4416 tvReadCell(val, output); \
4418 m_stack.popA(); \
4419 SPROP_OP_POSTLUDE \
4420 } while (0)
4422 inline void OPTBLD_INLINE VMExecutionContext::iopCGetS(PC& pc) {
4423 StringData* name;
4424 GETS(false);
4425 if (shouldProfile() && name && name->isStatic()) {
4426 recordType(TypeProfileKey(TypeProfileKey::StaticPropName, name),
4427 m_stack.top()->m_type);
4431 inline void OPTBLD_INLINE VMExecutionContext::iopCGetM(PC& pc) {
4432 PC oldPC = pc;
4433 NEXT();
4434 DECLARE_GETHELPER_ARGS
4435 getHelper(GETHELPER_ARGS);
4436 if (tvRet->m_type == KindOfRef) {
4437 tvUnbox(tvRet);
4439 assert(hasImmVector(toOp(*oldPC)));
4440 const ImmVector& immVec = ImmVector::createFromStream(oldPC + 1);
4441 StringData* name;
4442 MemberCode mc;
4443 if (immVec.decodeLastMember(curUnit(), name, mc)) {
4444 recordType(TypeProfileKey(mc, name), m_stack.top()->m_type);
4448 static inline void vgetl_body(TypedValue* fr, TypedValue* to) {
4449 if (fr->m_type != KindOfRef) {
4450 tvBox(fr);
4452 tvDup(*fr, *to);
4455 inline void OPTBLD_INLINE VMExecutionContext::iopVGetL(PC& pc) {
4456 NEXT();
4457 DECODE_HA(local);
4458 Var* to = m_stack.allocV();
4459 TypedValue* fr = frame_local(m_fp, local);
4460 vgetl_body(fr, to);
4463 inline void OPTBLD_INLINE VMExecutionContext::iopVGetN(PC& pc) {
4464 NEXT();
4465 StringData* name;
4466 TypedValue* to = m_stack.topTV();
4467 TypedValue* fr = nullptr;
4468 lookupd_var(m_fp, name, to, fr);
4469 assert(fr != nullptr);
4470 tvRefcountedDecRefCell(to);
4471 vgetl_body(fr, to);
4472 decRefStr(name);
4475 inline void OPTBLD_INLINE VMExecutionContext::iopVGetG(PC& pc) {
4476 NEXT();
4477 StringData* name;
4478 TypedValue* to = m_stack.topTV();
4479 TypedValue* fr = nullptr;
4480 lookupd_gbl(m_fp, name, to, fr);
4481 assert(fr != nullptr);
4482 tvRefcountedDecRefCell(to);
4483 vgetl_body(fr, to);
4484 decRefStr(name);
4487 inline void OPTBLD_INLINE VMExecutionContext::iopVGetS(PC& pc) {
4488 StringData* name;
4489 GETS(true);
4491 #undef GETS
4493 inline void OPTBLD_INLINE VMExecutionContext::iopVGetM(PC& pc) {
4494 NEXT();
4495 DECLARE_SETHELPER_ARGS
4496 TypedValue* tv1 = m_stack.allocTV();
4497 tvWriteUninit(tv1);
4498 if (!setHelperPre<false, true, false, true, 1,
4499 VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
4500 if (base->m_type != KindOfRef) {
4501 tvBox(base);
4503 varDup(*base, *tv1);
4504 } else {
4505 tvWriteNull(tv1);
4506 tvBox(tv1);
4508 setHelperPost<1>(SETHELPERPOST_ARGS);
4511 inline void OPTBLD_INLINE VMExecutionContext::iopIssetN(PC& pc) {
4512 NEXT();
4513 StringData* name;
4514 TypedValue* tv1 = m_stack.topTV();
4515 TypedValue* tv = nullptr;
4516 bool e;
4517 lookup_var(m_fp, name, tv1, tv);
4518 if (tv == nullptr) {
4519 e = false;
4520 } else {
4521 e = isset(tvAsCVarRef(tv));
4523 tvRefcountedDecRefCell(tv1);
4524 tv1->m_data.num = e;
4525 tv1->m_type = KindOfBoolean;
4526 decRefStr(name);
4529 inline void OPTBLD_INLINE VMExecutionContext::iopIssetG(PC& pc) {
4530 NEXT();
4531 StringData* name;
4532 TypedValue* tv1 = m_stack.topTV();
4533 TypedValue* tv = nullptr;
4534 bool e;
4535 lookup_gbl(m_fp, name, tv1, tv);
4536 if (tv == nullptr) {
4537 e = false;
4538 } else {
4539 e = isset(tvAsCVarRef(tv));
4541 tvRefcountedDecRefCell(tv1);
4542 tv1->m_data.num = e;
4543 tv1->m_type = KindOfBoolean;
4544 decRefStr(name);
4547 inline void OPTBLD_INLINE VMExecutionContext::iopIssetS(PC& pc) {
4548 StringData* name;
4549 SPROP_OP_PRELUDE
4550 bool e;
4551 if (!(visible && accessible)) {
4552 e = false;
4553 } else {
4554 e = isset(tvAsCVarRef(val));
4556 m_stack.popA();
4557 output->m_data.num = e;
4558 output->m_type = KindOfBoolean;
4559 SPROP_OP_POSTLUDE
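// isSetEmptyM backs both IssetM and EmptyM; the final member is dispatched
// to IssetEmptyElem or IssetEmptyProp depending on the member code.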
4562 template <bool isEmpty>
4563 inline void OPTBLD_INLINE VMExecutionContext::isSetEmptyM(PC& pc) {
4564 NEXT();
4565 DECLARE_GETHELPER_ARGS
4566 getHelperPre<false, false, VectorLeaveCode::LeaveLast>(MEMBERHELPERPRE_ARGS);
4567 // Process last member specially, in order to employ the IssetElem/IssetProp
4568 // operations.
4569 bool isSetEmptyResult = false;
4570 switch (mcode) {
4571 case MEL:
4572 case MEC:
4573 case MET:
4574 case MEI: {
4575 isSetEmptyResult = IssetEmptyElem<isEmpty>(tvScratch, *tvRef.asTypedValue(),
4576 base, curMember);
4577 break;
4579 case MPL:
4580 case MPC:
4581 case MPT: {
4582 Class* ctx = arGetContextClass(m_fp);
4583 isSetEmptyResult = IssetEmptyProp<isEmpty>(ctx, base, curMember);
4584 break;
4586 default: assert(false);
4588 getHelperPost<false>(GETHELPERPOST_ARGS);
4589 tvRet->m_data.num = isSetEmptyResult;
4590 tvRet->m_type = KindOfBoolean;
4593 inline void OPTBLD_INLINE VMExecutionContext::iopIssetM(PC& pc) {
4594 isSetEmptyM<false>(pc);
4597 #define IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4598 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## L(PC& pc) { \
4599 NEXT(); \
4600 DECODE_HA(local); \
4601 TypedValue* tv = frame_local(m_fp, local); \
4602 if (checkInit && tv->m_type == KindOfUninit) { \
4603 raise_undefined_local(m_fp, local); \
4605 bool ret = predicate(tvAsCVarRef(tv)); \
4606 TypedValue* topTv = m_stack.allocTV(); \
4607 topTv->m_data.num = ret; \
4608 topTv->m_type = KindOfBoolean; \
4611 #define IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
4612 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## C(PC& pc) { \
4613 NEXT(); \
4614 TypedValue* topTv = m_stack.topTV(); \
4615 assert(topTv->m_type != KindOfRef); \
4616 bool ret = predicate(tvAsCVarRef(topTv)); \
4617 tvRefcountedDecRefCell(topTv); \
4618 topTv->m_data.num = ret; \
4619 topTv->m_type = KindOfBoolean; \
4622 #define IOP_TYPE_CHECK_INSTR(checkInit, what, predicate) \
4623 IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4624 IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
4626 IOP_TYPE_CHECK_INSTR_L(false, set, isset)
4627 IOP_TYPE_CHECK_INSTR(true, Null, is_null)
4628 IOP_TYPE_CHECK_INSTR(true, Array, is_array)
4629 IOP_TYPE_CHECK_INSTR(true, String, is_string)
4630 IOP_TYPE_CHECK_INSTR(true, Object, is_object)
4631 IOP_TYPE_CHECK_INSTR(true, Int, is_int)
4632 IOP_TYPE_CHECK_INSTR(true, Double, is_double)
4633 IOP_TYPE_CHECK_INSTR(true, Bool, is_bool)
4634 #undef IOP_TYPE_CHECK_INSTR
4636 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyL(PC& pc) {
4637 NEXT();
4638 DECODE_HA(local);
4639 TypedValue* loc = frame_local(m_fp, local);
4640 bool e = empty(tvAsCVarRef(loc));
4641 TypedValue* tv1 = m_stack.allocTV();
4642 tv1->m_data.num = e;
4643 tv1->m_type = KindOfBoolean;
4646 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyN(PC& pc) {
4647 NEXT();
4648 StringData* name;
4649 TypedValue* tv1 = m_stack.topTV();
4650 TypedValue* tv = nullptr;
4651 bool e;
4652 lookup_var(m_fp, name, tv1, tv);
4653 if (tv == nullptr) {
4654 e = true;
4655 } else {
4656 e = empty(tvAsCVarRef(tv));
4658 tvRefcountedDecRefCell(tv1);
4659 tv1->m_data.num = e;
4660 tv1->m_type = KindOfBoolean;
4661 decRefStr(name);
4664 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyG(PC& pc) {
4665 NEXT();
4666 StringData* name;
4667 TypedValue* tv1 = m_stack.topTV();
4668 TypedValue* tv = nullptr;
4669 bool e;
4670 lookup_gbl(m_fp, name, tv1, tv);
4671 if (tv == nullptr) {
4672 e = true;
4673 } else {
4674 e = empty(tvAsCVarRef(tv));
4676 tvRefcountedDecRefCell(tv1);
4677 tv1->m_data.num = e;
4678 tv1->m_type = KindOfBoolean;
4679 decRefStr(name);
4682 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyS(PC& pc) {
4683 StringData* name;
4684 SPROP_OP_PRELUDE
4685 bool e;
4686 if (!(visible && accessible)) {
4687 e = true;
4688 } else {
4689 e = empty(tvAsCVarRef(val));
4691 m_stack.popA();
4692 output->m_data.num = e;
4693 output->m_type = KindOfBoolean;
4694 SPROP_OP_POSTLUDE
4697 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyM(PC& pc) {
4698 isSetEmptyM<true>(pc);
4701 inline void OPTBLD_INLINE VMExecutionContext::iopAKExists(PC& pc) {
4702 NEXT();
4703 TypedValue* arr = m_stack.topTV();
4704 TypedValue* key = arr + 1;
4705 bool result = f_array_key_exists(tvAsCVarRef(key), tvAsCVarRef(arr));
4706 m_stack.popTV();
4707 tvRefcountedDecRef(key);
4708 key->m_data.num = result;
4709 key->m_type = KindOfBoolean;
4712 inline void OPTBLD_INLINE VMExecutionContext::iopArrayIdx(PC& pc) {
4713 NEXT();
4714 TypedValue* def = m_stack.topTV();
4715 TypedValue* arr = m_stack.indTV(1);
4716 TypedValue* key = m_stack.indTV(2);
4718 Variant result = f_hphp_array_idx(tvAsCVarRef(key),
4719 tvAsCVarRef(arr),
4720 tvAsCVarRef(def));
4721 m_stack.popTV();
4722 m_stack.popTV();
4723 tvAsVariant(key) = result;
4726 inline void OPTBLD_INLINE VMExecutionContext::iopSetL(PC& pc) {
4727 NEXT();
4728 DECODE_HA(local);
4729 assert(local < m_fp->m_func->numLocals());
4730 Cell* fr = m_stack.topC();
4731 TypedValue* to = frame_local(m_fp, local);
4732 tvSet(*fr, *to);
4735 inline void OPTBLD_INLINE VMExecutionContext::iopSetN(PC& pc) {
4736 NEXT();
4737 StringData* name;
4738 Cell* fr = m_stack.topC();
4739 TypedValue* tv2 = m_stack.indTV(1);
4740 TypedValue* to = nullptr;
4741 lookupd_var(m_fp, name, tv2, to);
4742 assert(to != nullptr);
4743 tvSet(*fr, *to);
4744 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
4745 m_stack.discard();
4746 decRefStr(name);
4749 inline void OPTBLD_INLINE VMExecutionContext::iopSetG(PC& pc) {
4750 NEXT();
4751 StringData* name;
4752 Cell* fr = m_stack.topC();
4753 TypedValue* tv2 = m_stack.indTV(1);
4754 TypedValue* to = nullptr;
4755 lookupd_gbl(m_fp, name, tv2, to);
4756 assert(to != nullptr);
4757 tvSet(*fr, *to);
4758 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
4759 m_stack.discard();
4760 decRefStr(name);
4763 inline void OPTBLD_INLINE VMExecutionContext::iopSetS(PC& pc) {
4764 NEXT();
4765 TypedValue* tv1 = m_stack.topTV();
4766 TypedValue* classref = m_stack.indTV(1);
4767 TypedValue* propn = m_stack.indTV(2);
4768 TypedValue* output = propn;
4769 StringData* name;
4770 TypedValue* val;
4771 bool visible, accessible;
4772 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
4773 if (!(visible && accessible)) {
4774 raise_error("Invalid static property access: %s::%s",
4775 classref->m_data.pcls->name()->data(),
4776 name->data());
4778 tvSet(*tv1, *val);
4779 tvRefcountedDecRefCell(propn);
4780 memcpy(output, tv1, sizeof(TypedValue));
4781 m_stack.ndiscard(2);
4782 decRefStr(name);
4785 inline void OPTBLD_INLINE VMExecutionContext::iopSetM(PC& pc) {
4786 NEXT();
4787 DECLARE_SETHELPER_ARGS
4788 if (!setHelperPre<false, true, false, false, 1,
4789 VectorLeaveCode::LeaveLast>(MEMBERHELPERPRE_ARGS)) {
4790 Cell* c1 = m_stack.topC();
4792 if (mcode == MW) {
4793 SetNewElem<true>(base, c1);
4794 } else {
4795 switch (mcode) {
4796 case MEL:
4797 case MEC:
4798 case MET:
4799 case MEI: {
4800 StringData* result = SetElem<true>(base, curMember, c1);
4801 if (result) {
4802 tvRefcountedDecRefCell(c1);
4803 c1->m_type = KindOfString;
4804 c1->m_data.pstr = result;
4806 break;
4808 case MPL:
4809 case MPC:
4810 case MPT: {
4811 Class* ctx = arGetContextClass(m_fp);
4812 SetProp<true>(ctx, base, curMember, c1);
4813 break;
4815 default: assert(false);
4819 setHelperPost<1>(SETHELPERPOST_ARGS);
4822 inline void OPTBLD_INLINE VMExecutionContext::iopSetWithRefLM(PC& pc) {
4823 NEXT();
4824 DECLARE_SETHELPER_ARGS
4825 bool skip = setHelperPre<false, true, false, false, 0,
4826 VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS);
4827 DECODE_HA(local);
4828 if (!skip) {
4829 TypedValue* from = frame_local(m_fp, local);
4830 tvAsVariant(base) = withRefBind(tvAsVariant(from));
4832 setHelperPost<0>(SETHELPERPOST_ARGS);
4835 inline void OPTBLD_INLINE VMExecutionContext::iopSetWithRefRM(PC& pc) {
4836 NEXT();
4837 DECLARE_SETHELPER_ARGS
4838 bool skip = setHelperPre<false, true, false, false, 1,
4839 VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS);
4840 if (!skip) {
4841 TypedValue* from = m_stack.top();
4842 tvAsVariant(base) = withRefBind(tvAsVariant(from));
4844 setHelperPost<0>(SETHELPERPOST_ARGS);
4845 m_stack.popTV();
4848 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpL(PC& pc) {
4849 NEXT();
4850 DECODE_HA(local);
4851 DECODE(unsigned char, op);
4852 Cell* fr = m_stack.topC();
4853 TypedValue* to = frame_local(m_fp, local);
4854 SETOP_BODY(to, op, fr);
4855 tvRefcountedDecRefCell(fr);
4856 tvReadCell(to, fr);
4859 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpN(PC& pc) {
4860 NEXT();
4861 DECODE(unsigned char, op);
4862 StringData* name;
4863 Cell* fr = m_stack.topC();
4864 TypedValue* tv2 = m_stack.indTV(1);
4865 TypedValue* to = nullptr;
4866 // XXX We're probably not getting warnings totally correct here
4867 lookupd_var(m_fp, name, tv2, to);
4868 assert(to != nullptr);
4869 SETOP_BODY(to, op, fr);
4870 tvRefcountedDecRef(fr);
4871 tvRefcountedDecRef(tv2);
4872 tvReadCell(to, tv2);
4873 m_stack.discard();
4874 decRefStr(name);
4877 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpG(PC& pc) {
4878 NEXT();
4879 DECODE(unsigned char, op);
4880 StringData* name;
4881 Cell* fr = m_stack.topC();
4882 TypedValue* tv2 = m_stack.indTV(1);
4883 TypedValue* to = nullptr;
4884 // XXX We're probably not getting warnings totally correct here
4885 lookupd_gbl(m_fp, name, tv2, to);
4886 assert(to != nullptr);
4887 SETOP_BODY(to, op, fr);
4888 tvRefcountedDecRef(fr);
4889 tvRefcountedDecRef(tv2);
4890 tvReadCell(to, tv2);
4891 m_stack.discard();
4892 decRefStr(name);
4895 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpS(PC& pc) {
4896 NEXT();
4897 DECODE(unsigned char, op);
4898 Cell* fr = m_stack.topC();
4899 TypedValue* classref = m_stack.indTV(1);
4900 TypedValue* propn = m_stack.indTV(2);
4901 TypedValue* output = propn;
4902 StringData* name;
4903 TypedValue* val;
4904 bool visible, accessible;
4905 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
4906 if (!(visible && accessible)) {
4907 raise_error("Invalid static property access: %s::%s",
4908 classref->m_data.pcls->name()->data(),
4909 name->data());
4911 SETOP_BODY(val, op, fr);
4912 tvRefcountedDecRefCell(propn);
4913 tvRefcountedDecRef(fr);
4914 tvReadCell(val, output);
4915 m_stack.ndiscard(2);
4916 decRefStr(name);
4919 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpM(PC& pc) {
4920 NEXT();
4921 DECODE(unsigned char, op);
4922 DECLARE_SETHELPER_ARGS
4923 if (!setHelperPre<MoreWarnings, true, false, false, 1,
4924 VectorLeaveCode::LeaveLast>(MEMBERHELPERPRE_ARGS)) {
4925 TypedValue* result;
4926 Cell* rhs = m_stack.topC();
4928 if (mcode == MW) {
4929 result = SetOpNewElem(tvScratch, *tvRef.asTypedValue(), op, base, rhs);
4930 } else {
4931 switch (mcode) {
4932 case MEL:
4933 case MEC:
4934 case MET:
4935 case MEI:
4936 result = SetOpElem(tvScratch, *tvRef.asTypedValue(), op, base,
4937 curMember, rhs);
4938 break;
4939 case MPL:
4940 case MPC:
4941 case MPT: {
4942 Class *ctx = arGetContextClass(m_fp);
4943 result = SetOpProp(tvScratch, *tvRef.asTypedValue(), ctx, op, base,
4944 curMember, rhs);
4945 break;
4947 default:
4948 assert(false);
4949 result = nullptr; // Silence compiler warning.
4953 tvRefcountedDecRef(rhs);
4954 tvReadCell(result, rhs);
4956 setHelperPost<1>(SETHELPERPOST_ARGS);
4959 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecL(PC& pc) {
4960 NEXT();
4961 DECODE_HA(local);
4962 DECODE(unsigned char, op);
4963 TypedValue* to = m_stack.allocTV();
4964 tvWriteUninit(to);
4965 TypedValue* fr = frame_local(m_fp, local);
4966 IncDecBody<true>(op, fr, to);
4969 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecN(PC& pc) {
4970 NEXT();
4971 DECODE(unsigned char, op);
4972 StringData* name;
4973 TypedValue* nameCell = m_stack.topTV();
4974 TypedValue* local = nullptr;
4975 // XXX We're probably not getting warnings totally correct here
4976 lookupd_var(m_fp, name, nameCell, local);
4977 assert(local != nullptr);
4978 IncDecBody<true>(op, local, nameCell);
4979 decRefStr(name);
4982 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecG(PC& pc) {
4983 NEXT();
4984 DECODE(unsigned char, op);
4985 StringData* name;
4986 TypedValue* nameCell = m_stack.topTV();
4987 TypedValue* gbl = nullptr;
4988 // XXX We're probably not getting warnings totally correct here
4989 lookupd_gbl(m_fp, name, nameCell, gbl);
4990 assert(gbl != nullptr);
4991 IncDecBody<true>(op, gbl, nameCell);
4992 decRefStr(name);
4995 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecS(PC& pc) {
4996 StringData* name;
4997 SPROP_OP_PRELUDE
4998 DECODE(unsigned char, op);
4999 if (!(visible && accessible)) {
5000 raise_error("Invalid static property access: %s::%s",
5001 clsref->m_data.pcls->name()->data(),
5002 name->data());
5004 tvRefcountedDecRefCell(nameCell);
5005 IncDecBody<true>(op, val, output);
5006 m_stack.discard();
5007 SPROP_OP_POSTLUDE
5010 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecM(PC& pc) {
5011 NEXT();
5012 DECODE(unsigned char, op);
5013 DECLARE_SETHELPER_ARGS
5014 TypedValue to;
5015 tvWriteUninit(&to);
5016 if (!setHelperPre<MoreWarnings, true, false, false, 0,
5017 VectorLeaveCode::LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5018 if (mcode == MW) {
5019 IncDecNewElem<true>(tvScratch, *tvRef.asTypedValue(), op, base, to);
5020 } else {
5021 switch (mcode) {
5022 case MEL:
5023 case MEC:
5024 case MET:
5025 case MEI:
5026 IncDecElem<true>(tvScratch, *tvRef.asTypedValue(), op, base,
5027 curMember, to);
5028 break;
5029 case MPL:
5030 case MPC:
5031 case MPT: {
5032 Class* ctx = arGetContextClass(m_fp);
5033 IncDecProp<true>(tvScratch, *tvRef.asTypedValue(), ctx, op, base,
5034 curMember, to);
5035 break;
5037 default: assert(false);
5041 setHelperPost<0>(SETHELPERPOST_ARGS);
5042 Cell* c1 = m_stack.allocC();
5043 memcpy(c1, &to, sizeof(TypedValue));
5046 inline void OPTBLD_INLINE VMExecutionContext::iopBindL(PC& pc) {
5047 NEXT();
5048 DECODE_HA(local);
5049 Var* fr = m_stack.topV();
5050 TypedValue* to = frame_local(m_fp, local);
5051 tvBind(fr, to);
5054 inline void OPTBLD_INLINE VMExecutionContext::iopBindN(PC& pc) {
5055 NEXT();
5056 StringData* name;
5057 TypedValue* fr = m_stack.topTV();
5058 TypedValue* nameTV = m_stack.indTV(1);
5059 TypedValue* to = nullptr;
5060 lookupd_var(m_fp, name, nameTV, to);
5061 assert(to != nullptr);
5062 tvBind(fr, to);
5063 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
5064 m_stack.discard();
5065 decRefStr(name);
5068 inline void OPTBLD_INLINE VMExecutionContext::iopBindG(PC& pc) {
5069 NEXT();
5070 StringData* name;
5071 TypedValue* fr = m_stack.topTV();
5072 TypedValue* nameTV = m_stack.indTV(1);
5073 TypedValue* to = nullptr;
5074 lookupd_gbl(m_fp, name, nameTV, to);
5075 assert(to != nullptr);
5076 tvBind(fr, to);
5077 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
5078 m_stack.discard();
5079 decRefStr(name);
5082 inline void OPTBLD_INLINE VMExecutionContext::iopBindS(PC& pc) {
5083 NEXT();
5084 TypedValue* fr = m_stack.topTV();
5085 TypedValue* classref = m_stack.indTV(1);
5086 TypedValue* propn = m_stack.indTV(2);
5087 TypedValue* output = propn;
5088 StringData* name;
5089 TypedValue* val;
5090 bool visible, accessible;
5091 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
5092 if (!(visible && accessible)) {
5093 raise_error("Invalid static property access: %s::%s",
5094 classref->m_data.pcls->name()->data(),
5095 name->data());
5097 tvBind(fr, val);
5098 tvRefcountedDecRefCell(propn);
5099 memcpy(output, fr, sizeof(TypedValue));
5100 m_stack.ndiscard(2);
5101 decRefStr(name);
5104 inline void OPTBLD_INLINE VMExecutionContext::iopBindM(PC& pc) {
5105 NEXT();
5106 DECLARE_SETHELPER_ARGS
5107 TypedValue* tv1 = m_stack.topTV();
5108 if (!setHelperPre<false, true, false, true, 1,
5109 VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
5110 // Bind the element/property with the var on the top of the stack
5111 tvBind(tv1, base);
5113 setHelperPost<1>(SETHELPERPOST_ARGS);
5116 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetL(PC& pc) {
5117 NEXT();
5118 DECODE_HA(local);
5119 assert(local < m_fp->m_func->numLocals());
5120 TypedValue* tv = frame_local(m_fp, local);
5121 tvRefcountedDecRef(tv);
5122 tvWriteUninit(tv);
5125 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetN(PC& pc) {
5126 NEXT();
5127 StringData* name;
5128 TypedValue* tv1 = m_stack.topTV();
5129 TypedValue* tv = nullptr;
5130 lookup_var(m_fp, name, tv1, tv);
5131 assert(!m_fp->hasInvName());
5132 if (tv != nullptr) {
5133 tvRefcountedDecRef(tv);
5134 tvWriteUninit(tv);
5136 m_stack.popC();
5137 decRefStr(name);
5140 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetG(PC& pc) {
5141 NEXT();
5142 TypedValue* tv1 = m_stack.topTV();
5143 StringData* name = lookup_name(tv1);
5144 VarEnv* varEnv = m_globalVarEnv;
5145 assert(varEnv != nullptr);
5146 varEnv->unset(name);
5147 m_stack.popC();
5148 decRefStr(name);
5151 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetM(PC& pc) {
5152 NEXT();
5153 DECLARE_SETHELPER_ARGS
5154 if (!setHelperPre<false, false, true, false, 0,
5155 VectorLeaveCode::LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5156 switch (mcode) {
5157 case MEL:
5158 case MEC:
5159 case MET:
5160 case MEI:
5161 UnsetElem(base, curMember);
5162 break;
5163 case MPL:
5164 case MPC:
5165 case MPT: {
5166 Class* ctx = arGetContextClass(m_fp);
5167 UnsetProp(ctx, base, curMember);
5168 break;
5170 default: assert(false);
5173 setHelperPost<0>(SETHELPERPOST_ARGS);
5176 inline ActRec* OPTBLD_INLINE VMExecutionContext::fPushFuncImpl(
5177 const Func* func,
5178 int numArgs) {
5179 DEBUGGER_IF(phpBreakpointEnabled(func->name()->data()));
5180 ActRec* ar = m_stack.allocA();
5181 arSetSfp(ar, m_fp);
5182 ar->m_func = func;
5183 ar->initNumArgs(numArgs);
5184 ar->setVarEnv(nullptr);
5185 return ar;
5188 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFunc(PC& pc) {
5189 NEXT();
5190 DECODE_IVA(numArgs);
5191 Cell* c1 = m_stack.topC();
5192 const Func* func = nullptr;
5193 ObjectData* origObj = nullptr;
5194 StringData* origSd = nullptr;
5195 if (IS_STRING_TYPE(c1->m_type)) {
5196 origSd = c1->m_data.pstr;
5197 func = Unit::loadFunc(origSd);
5198 } else if (c1->m_type == KindOfObject) {
5199 static StringData* invokeName = StringData::GetStaticString("__invoke");
5200 origObj = c1->m_data.pobj;
5201 const Class* cls = origObj->getVMClass();
5202 func = cls->lookupMethod(invokeName);
5203 if (func == nullptr) {
5204 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5206 } else {
5207 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5209 if (func == nullptr) {
5210 raise_error("Call to undefined function %s()", c1->m_data.pstr->data());
5212 assert(!origObj || !origSd);
5213 assert(origObj || origSd);
5214 // We've already saved origObj or origSd; we'll use them after
5215 // overwriting the pointer on the stack. Don't refcount it now; defer
5216 // till after we're done with it.
5217 m_stack.discard();
5218 ActRec* ar = fPushFuncImpl(func, numArgs);
5219 if (origObj) {
5220 if (func->attrs() & AttrStatic && !func->isClosureBody()) {
5221 ar->setClass(origObj->getVMClass());
5222 decRefObj(origObj);
5223 } else {
5224 ar->setThis(origObj);
5225 // Teleport the reference from the destroyed stack cell to the
5226 // ActRec. Don't try this at home.
5228 } else {
5229 ar->setThis(nullptr);
5230 decRefStr(origSd);
5234 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFuncD(PC& pc) {
5235 NEXT();
5236 DECODE_IVA(numArgs);
5237 DECODE(Id, id);
5238 const NamedEntityPair nep = m_fp->m_func->unit()->lookupNamedEntityPairId(id);
5239 Func* func = Unit::loadFunc(nep.second, nep.first);
5240 if (func == nullptr) {
5241 raise_error("Call to undefined function %s()",
5242 m_fp->m_func->unit()->lookupLitstrId(id)->data());
5244 ActRec* ar = fPushFuncImpl(func, numArgs);
5245 ar->setThis(nullptr);
5248 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFuncU(PC& pc) {
5249 NEXT();
5250 DECODE_IVA(numArgs);
5251 DECODE(Id, nsFunc);
5252 DECODE(Id, globalFunc);
5253 Unit* unit = m_fp->m_func->unit();
5254 const NamedEntityPair nep = unit->lookupNamedEntityPairId(nsFunc);
5255 Func* func = Unit::loadFunc(nep.second, nep.first);
5256 if (func == nullptr) {
5257 const NamedEntityPair nep2 = unit->lookupNamedEntityPairId(globalFunc);
5258 func = Unit::loadFunc(nep2.second, nep2.first);
5259 if (func == nullptr) {
5260 const char *funcName = unit->lookupLitstrId(nsFunc)->data();
5261 raise_error("Call to undefined function %s()", funcName);
5264 ActRec* ar = fPushFuncImpl(func, numArgs);
5265 ar->setThis(nullptr);
5268 void VMExecutionContext::fPushObjMethodImpl(
5269 Class* cls, StringData* name, ObjectData* obj, int numArgs) {
5270 const Func* f;
5271 LookupResult res = lookupObjMethod(f, cls, name, true);
5272 assert(f);
5273 ActRec* ar = m_stack.allocA();
5274 arSetSfp(ar, m_fp);
5275 ar->m_func = f;
5276 if (res == LookupResult::MethodFoundNoThis) {
5277 decRefObj(obj);
5278 ar->setClass(cls);
5279 } else {
5280 assert(res == LookupResult::MethodFoundWithThis ||
5281 res == LookupResult::MagicCallFound);
5282 /* Transfer ownership of obj to the ActRec*/
5283 ar->setThis(obj);
5285 ar->initNumArgs(numArgs);
5286 if (res == LookupResult::MagicCallFound) {
5287 ar->setInvName(name);
5288 } else {
5289 ar->setVarEnv(NULL);
5290 decRefStr(name);
5294 inline void OPTBLD_INLINE VMExecutionContext::iopFPushObjMethod(PC& pc) {
5295 NEXT();
5296 DECODE_IVA(numArgs);
5297 Cell* c1 = m_stack.topC(); // Method name.
5298 if (!IS_STRING_TYPE(c1->m_type)) {
5299 raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
5301 Cell* c2 = m_stack.indC(1); // Object.
5302 if (c2->m_type != KindOfObject) {
5303 throw_call_non_object(c1->m_data.pstr->data());
5305 ObjectData* obj = c2->m_data.pobj;
5306 Class* cls = obj->getVMClass();
5307 StringData* name = c1->m_data.pstr;
5308 // We handle decReffing obj and name in fPushObjMethodImpl
5309 m_stack.ndiscard(2);
5310 fPushObjMethodImpl(cls, name, obj, numArgs);
5313 inline void OPTBLD_INLINE VMExecutionContext::iopFPushObjMethodD(PC& pc) {
5314 NEXT();
5315 DECODE_IVA(numArgs);
5316 DECODE_LITSTR(name);
5317 Cell* c1 = m_stack.topC();
5318 if (c1->m_type != KindOfObject) {
5319 throw_call_non_object(name->data());
5321 ObjectData* obj = c1->m_data.pobj;
5322 Class* cls = obj->getVMClass();
5323 // We handle decReffing obj in fPushObjMethodImpl
5324 m_stack.discard();
5325 fPushObjMethodImpl(cls, name, obj, numArgs);
5328 template<bool forwarding>
5329 void VMExecutionContext::pushClsMethodImpl(Class* cls,
5330 StringData* name,
5331 ObjectData* obj,
5332 int numArgs) {
5333 const Func* f;
5334 LookupResult res = lookupClsMethod(f, cls, name, obj, getFP(), true);
5335 if (res == LookupResult::MethodFoundNoThis ||
5336 res == LookupResult::MagicCallStaticFound) {
5337 obj = nullptr;
5338 } else {
5339 assert(obj);
5340 assert(res == LookupResult::MethodFoundWithThis ||
5341 res == LookupResult::MagicCallFound);
5342 obj->incRefCount();
5344 assert(f);
5345 ActRec* ar = m_stack.allocA();
5346 arSetSfp(ar, m_fp);
5347 ar->m_func = f;
5348 if (obj) {
5349 ar->setThis(obj);
5350 } else {
5351 if (!forwarding) {
5352 ar->setClass(cls);
5353 } else {
5354 /* Propagate the current late bound class if there is one, */
5355 /* otherwise use the class given by this instruction's input */
5356 if (m_fp->hasThis()) {
5357 cls = m_fp->getThis()->getVMClass();
5358 } else if (m_fp->hasClass()) {
5359 cls = m_fp->getClass();
5361 ar->setClass(cls);
5364 ar->initNumArgs(numArgs);
5365 if (res == LookupResult::MagicCallFound ||
5366 res == LookupResult::MagicCallStaticFound) {
5367 ar->setInvName(name);
5368 } else {
5369 ar->setVarEnv(nullptr);
5370 decRefStr(const_cast<StringData*>(name));
5374 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethod(PC& pc) {
5375 NEXT();
5376 DECODE_IVA(numArgs);
5377 Cell* c1 = m_stack.indC(1); // Method name.
5378 if (!IS_STRING_TYPE(c1->m_type)) {
5379 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5381 TypedValue* tv = m_stack.top();
5382 assert(tv->m_type == KindOfClass);
5383 Class* cls = tv->m_data.pcls;
5384 StringData* name = c1->m_data.pstr;
5385 // pushClsMethodImpl will take care of decReffing name
5386 m_stack.ndiscard(2);
5387 assert(cls && name);
5388 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5389 pushClsMethodImpl<false>(cls, name, obj, numArgs);
5392 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethodD(PC& pc) {
5393 NEXT();
5394 DECODE_IVA(numArgs);
5395 DECODE_LITSTR(name);
5396 DECODE(Id, classId);
5397 const NamedEntityPair &nep =
5398 m_fp->m_func->unit()->lookupNamedEntityPairId(classId);
5399 Class* cls = Unit::loadClass(nep.second, nep.first);
5400 if (cls == nullptr) {
5401 raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
5403 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5404 pushClsMethodImpl<false>(cls, name, obj, numArgs);
5407 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethodF(PC& pc) {
5408 NEXT();
5409 DECODE_IVA(numArgs);
5410 Cell* c1 = m_stack.indC(1); // Method name.
5411 if (!IS_STRING_TYPE(c1->m_type)) {
5412 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5414 TypedValue* tv = m_stack.top();
5415 assert(tv->m_type == KindOfClass);
5416 Class* cls = tv->m_data.pcls;
5417 assert(cls);
5418 StringData* name = c1->m_data.pstr;
5419 // pushClsMethodImpl will take care of decReffing name
5420 m_stack.ndiscard(2);
5421 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5422 pushClsMethodImpl<true>(cls, name, obj, numArgs);
5425 #undef CLSMETHOD_BODY
5427 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCtor(PC& pc) {
5428 NEXT();
5429 DECODE_IVA(numArgs);
5430 TypedValue* tv = m_stack.topTV();
5431 assert(tv->m_type == KindOfClass);
5432 Class* cls = tv->m_data.pcls;
5433 assert(cls != nullptr);
5434 // Lookup the ctor
5435 const Func* f;
5436 LookupResult res UNUSED = lookupCtorMethod(f, cls, true);
5437 assert(res == LookupResult::MethodFoundWithThis);
5438 // Replace input with uninitialized instance.
5439 ObjectData* this_ = newInstance(cls);
5440 TRACE(2, "FPushCtor: just new'ed an instance of class %s: %p\n",
5441 cls->name()->data(), this_);
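// The new instance picks up two references: one owned by the stack cell we
// overwrite below, and one owned by the $this slot of the ActRec we push.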
5442 this_->incRefCount();
5443 this_->incRefCount();
5444 tv->m_type = KindOfObject;
5445 tv->m_data.pobj = this_;
5446 // Push new activation record.
5447 ActRec* ar = m_stack.allocA();
5448 arSetSfp(ar, m_fp);
5449 ar->m_func = f;
5450 ar->setThis(this_);
5451 ar->initNumArgs(numArgs, true /* isFPushCtor */);
5452 arSetSfp(ar, m_fp);
5453 ar->setVarEnv(nullptr);
5456 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCtorD(PC& pc) {
5457 NEXT();
5458 DECODE_IVA(numArgs);
5459 DECODE(Id, id);
5460 const NamedEntityPair &nep =
5461 m_fp->m_func->unit()->lookupNamedEntityPairId(id);
5462 Class* cls = Unit::loadClass(nep.second, nep.first);
5463 if (cls == nullptr) {
5464 raise_error(Strings::UNKNOWN_CLASS,
5465 m_fp->m_func->unit()->lookupLitstrId(id)->data());
5467 // Lookup the ctor
5468 const Func* f;
5469 LookupResult res UNUSED = lookupCtorMethod(f, cls, true);
5470 assert(res == LookupResult::MethodFoundWithThis);
5471 // Push uninitialized instance.
5472 ObjectData* this_ = newInstance(cls);
5473 TRACE(2, "FPushCtorD: new'ed an instance of class %s: %p\n",
5474 cls->name()->data(), this_);
5475 this_->incRefCount();
5476 m_stack.pushObject(this_);
5477 // Push new activation record.
5478 ActRec* ar = m_stack.allocA();
5479 arSetSfp(ar, m_fp);
5480 ar->m_func = f;
5481 ar->setThis(this_);
5482 ar->initNumArgs(numArgs, true /* isFPushCtor */);
5483 ar->setVarEnv(nullptr);
5486 inline void OPTBLD_INLINE VMExecutionContext::iopDecodeCufIter(PC& pc) {
5487 PC origPc = pc;
5488 NEXT();
5489 DECODE_IA(itId);
5490 DECODE(Offset, offset);
5492 Iter* it = frame_iter(m_fp, itId);
5493 CufIter &cit = it->cuf();
5495 ObjectData* obj = nullptr;
5496 HPHP::Class* cls = nullptr;
5497 StringData* invName = nullptr;
5498 TypedValue *func = m_stack.topTV();
5500 ActRec* ar = m_fp;
5501 if (m_fp->m_func->isBuiltin()) {
5502 ar = getOuterVMFrame(ar);
5504 const Func* f = vm_decode_function(tvAsVariant(func),
5505 ar, false,
5506 obj, cls, invName,
5507 false);
5509 if (f == nullptr) {
5510 pc = origPc + offset;
5511 } else {
5512 cit.setFunc(f);
5513 if (obj) {
5514 cit.setCtx(obj);
5515 obj->incRefCount();
5516 } else {
5517 cit.setCtx(cls);
5519 cit.setName(invName);
5521 m_stack.popC();
5524 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufIter(PC& pc) {
5525 NEXT();
5526 DECODE_IVA(numArgs);
5527 DECODE_IA(itId);
5529 Iter* it = frame_iter(m_fp, itId);
5531 auto f = it->cuf().func();
5532 auto o = it->cuf().ctx();
5533 auto n = it->cuf().name();
5535 ActRec* ar = m_stack.allocA();
5536 arSetSfp(ar, m_fp);
5537 ar->m_func = f;
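// The cuf iterator's ctx appears to be a tagged pointer: a plain ObjectData*
// or a Class* with its low bit set, so the refcount is only bumped when it
// holds a real object.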
5538 ar->m_this = (ObjectData*)o;
5539 if (o && !(uintptr_t(o) & 1)) ar->m_this->incRefCount();
5540 if (n) {
5541 ar->setInvName(n);
5542 n->incRefCount();
5543 } else {
5544 ar->setVarEnv(nullptr);
5546 ar->initNumArgs(numArgs, false /* isFPushCtor */);
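// Shared body for FPushCuf, FPushCufF and FPushCufSafe. A callable that
// fails to resolve falls back to SystemLib::s_nullFunc; the safe variant
// additionally pushes a bool recording whether resolution succeeded.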
5549 inline void OPTBLD_INLINE VMExecutionContext::doFPushCuf(PC& pc,
5550 bool forward,
5551 bool safe) {
5552 NEXT();
5553 DECODE_IVA(numArgs);
5555 TypedValue func = m_stack.topTV()[safe];
5557 ObjectData* obj = nullptr;
5558 HPHP::Class* cls = nullptr;
5559 StringData* invName = nullptr;
5561 const Func* f = vm_decode_function(tvAsVariant(&func), getFP(),
5562 forward,
5563 obj, cls, invName,
5564 !safe);
5566 if (safe) m_stack.topTV()[1] = m_stack.topTV()[0];
5567 m_stack.ndiscard(1);
5568 if (f == nullptr) {
5569 f = SystemLib::s_nullFunc;
5570 if (safe) {
5571 m_stack.pushFalse();
5573 } else if (safe) {
5574 m_stack.pushTrue();
5577 ActRec* ar = m_stack.allocA();
5578 arSetSfp(ar, m_fp);
5579 ar->m_func = f;
5580 if (obj) {
5581 ar->setThis(obj);
5582 obj->incRefCount();
5583 } else if (cls) {
5584 ar->setClass(cls);
5585 } else {
5586 ar->setThis(nullptr);
5588 ar->initNumArgs(numArgs, false /* isFPushCtor */);
5589 if (invName) {
5590 ar->setInvName(invName);
5591 } else {
5592 ar->setVarEnv(nullptr);
5594 tvRefcountedDecRef(&func);
5597 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCuf(PC& pc) {
5598 doFPushCuf(pc, false, false);
5601 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufF(PC& pc) {
5602 doFPushCuf(pc, true, false);
5605 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufSafe(PC& pc) {
5606 doFPushCuf(pc, false, true);
5609 static inline ActRec* arFromInstr(TypedValue* sp, const Op* pc) {
5610 return arFromSpOffset((ActRec*)sp, instrSpToArDelta(pc));
5613 inline void OPTBLD_INLINE VMExecutionContext::iopFPassC(PC& pc) {
5614 #ifdef DEBUG
5615 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5616 #endif
5617 NEXT();
5618 DECODE_IVA(paramId);
5619 #ifdef DEBUG
5620 assert(paramId < ar->numArgs());
5621 #endif
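// FPassCW and FPassCE handle a by-value argument arriving at a parameter
// that must be passed by reference: CW raises a strict warning, CE raises a
// fatal error.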
5624 #define FPASSC_CHECKED_PRELUDE \
5625 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc); \
5626 NEXT(); \
5627 DECODE_IVA(paramId); \
5628 assert(paramId < ar->numArgs()); \
5629 const Func* func = ar->m_func;
5631 inline void OPTBLD_INLINE VMExecutionContext::iopFPassCW(PC& pc) {
5632 FPASSC_CHECKED_PRELUDE
5633 if (func->mustBeRef(paramId)) {
5634 TRACE(1, "FPassCW: function %s(%d) param %d is by reference, "
5635 "raising a strict warning (attr:0x%x)\n",
5636 func->name()->data(), func->numParams(), paramId,
5637 func->info() ? func->info()->attribute : 0);
5638 raise_strict_warning("Only variables should be passed by reference");
5642 inline void OPTBLD_INLINE VMExecutionContext::iopFPassCE(PC& pc) {
5643 FPASSC_CHECKED_PRELUDE
5644 if (func->mustBeRef(paramId)) {
5645 TRACE(1, "FPassCE: function %s(%d) param %d is by reference, "
5646 "throwing a fatal error (attr:0x%x)\n",
5647 func->name()->data(), func->numParams(), paramId,
5648 func->info() ? func->info()->attribute : 0);
5649 raise_error("Cannot pass parameter %d by reference", paramId+1);
5653 #undef FPASSC_CHECKED_PRELUDE
5655 inline void OPTBLD_INLINE VMExecutionContext::iopFPassV(PC& pc) {
5656 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5657 NEXT();
5658 DECODE_IVA(paramId);
5659 assert(paramId < ar->numArgs());
5660 const Func* func = ar->m_func;
5661 if (!func->byRef(paramId)) {
5662 m_stack.unbox();
5666 inline void OPTBLD_INLINE VMExecutionContext::iopFPassR(PC& pc) {
5667 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5668 NEXT();
5669 DECODE_IVA(paramId);
5670 assert(paramId < ar->numArgs());
5671 const Func* func = ar->m_func;
5672 if (func->byRef(paramId)) {
5673 TypedValue* tv = m_stack.topTV();
5674 if (tv->m_type != KindOfRef) {
5675 tvBox(tv);
5677 } else {
5678 if (m_stack.topTV()->m_type == KindOfRef) {
5679 m_stack.unbox();
5684 inline void OPTBLD_INLINE VMExecutionContext::iopFPassL(PC& pc) {
5685 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5686 NEXT();
5687 DECODE_IVA(paramId);
5688 DECODE_HA(local);
5689 assert(paramId < ar->numArgs());
5690 TypedValue* fr = frame_local(m_fp, local);
5691 TypedValue* to = m_stack.allocTV();
5692 if (!ar->m_func->byRef(paramId)) {
5693 cgetl_body(m_fp, fr, to, local);
5694 } else {
5695 vgetl_body(fr, to);
5699 inline void OPTBLD_INLINE VMExecutionContext::iopFPassN(PC& pc) {
5700 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5701 PC origPc = pc;
5702 NEXT();
5703 DECODE_IVA(paramId);
5704 assert(paramId < ar->numArgs());
5705 if (!ar->m_func->byRef(paramId)) {
5706 iopCGetN(origPc);
5707 } else {
5708 iopVGetN(origPc);
5712 inline void OPTBLD_INLINE VMExecutionContext::iopFPassG(PC& pc) {
5713 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5714 PC origPc = pc;
5715 NEXT();
5716 DECODE_IVA(paramId);
5717 assert(paramId < ar->numArgs());
5718 if (!ar->m_func->byRef(paramId)) {
5719 iopCGetG(origPc);
5720 } else {
5721 iopVGetG(origPc);
5725 inline void OPTBLD_INLINE VMExecutionContext::iopFPassS(PC& pc) {
5726 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5727 PC origPc = pc;
5728 NEXT();
5729 DECODE_IVA(paramId);
5730 assert(paramId < ar->numArgs());
5731 if (!ar->m_func->byRef(paramId)) {
5732 iopCGetS(origPc);
5733 } else {
5734 iopVGetS(origPc);
5738 void VMExecutionContext::iopFPassM(PC& pc) {
5739 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5740 NEXT();
5741 DECODE_IVA(paramId);
5742 assert(paramId < ar->numArgs());
5743 if (!ar->m_func->byRef(paramId)) {
5744 DECLARE_GETHELPER_ARGS
5745 getHelper(GETHELPER_ARGS);
5746 if (tvRet->m_type == KindOfRef) {
5747 tvUnbox(tvRet);
5749 } else {
5750 DECLARE_SETHELPER_ARGS
5751 TypedValue* tv1 = m_stack.allocTV();
5752 tvWriteUninit(tv1);
5753 if (!setHelperPre<false, true, false, true, 1,
5754 VectorLeaveCode::ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
5755 if (base->m_type != KindOfRef) {
5756 tvBox(base);
5758 varDup(*base, *tv1);
5759 } else {
5760 tvWriteNull(tv1);
5761 tvBox(tv1);
5763 setHelperPost<1>(SETHELPERPOST_ARGS);
5767 bool VMExecutionContext::doFCall(ActRec* ar, PC& pc) {
5768 assert(getOuterVMFrame(ar) == m_fp);
5769 ar->m_savedRip =
5770 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
5771 assert(isReturnHelper(ar->m_savedRip));
5772 TRACE(3, "FCall: pc %p func %p base %d\n", m_pc,
5773 m_fp->m_func->unit()->entry(),
5774 int(m_fp->m_func->base()));
5775 ar->m_soff = m_fp->m_func->unit()->offsetOf(pc)
5776 - (uintptr_t)m_fp->m_func->base();
5777 assert(pcOff() >= m_fp->m_func->base());
5778 prepareFuncEntry(ar, pc);
5779 SYNC();
5780 if (EventHook::FunctionEnter(ar, EventHook::NormalFunc)) return true;
5781 pc = m_pc;
5782 return false;
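// NB (editorial sketch, not in the original source; the numbers are made up):
// m_soff computed above is the caller-relative return offset. For example,
// if the caller's Func::base() is 0x20 and the instruction following the
// FCall sits at unit offset 0x58, then
//
//   ar->m_soff = 0x58 - 0x20 = 0x38;
//
// and the return path later recomputes the resume PC as
// unit()->entry() + base() + m_soff (see e.g. iopNativeImpl below).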
5785 inline void OPTBLD_INLINE VMExecutionContext::iopFCall(PC& pc) {
5786 ActRec* ar = arFromInstr(m_stack.top(), (Op*)pc);
5787 NEXT();
5788 DECODE_IVA(numArgs);
5789 assert(numArgs == ar->numArgs());
5790 checkStack(m_stack, ar->m_func);
5791 doFCall(ar, pc);
5794 // Return a function pointer type for calling a builtin with a given
5795 // return value and args.
5796 template<class Ret, class... Args> struct NativeFunction {
5797 typedef Ret (*type)(Args...);
5800 // Recursively pack all parameters up to call a native builtin.
5801 template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller;
5802 template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller {
5803 template<class... Args>
5804 static Ret call(const Func* func, TypedValue* tvs, Args... args) {
5805 typedef NativeFuncCaller<Ret,NArgs - 1,CurArg + 1> NextArgT;
5806 DataType type = func->params()[CurArg].builtinType();
5807 if (type == KindOfDouble) {
5808 // pass TV.m_data.dbl by value with C++ calling convention for doubles
5809 return NextArgT::call(func, tvs - 1, args..., tvs->m_data.dbl);
5811 if (type == KindOfInt64 || type == KindOfBoolean) {
5812 // pass TV.m_data.num by value
5813 return NextArgT::call(func, tvs - 1, args..., tvs->m_data.num);
5815 if (IS_STRING_TYPE(type) || type == KindOfArray || type == KindOfObject) {
5816 // pass ptr to TV.m_data for String&, Array&, or Object&
5817 return NextArgT::call(func, tvs - 1, args..., &tvs->m_data);
5819 // final case is for passing full value as Variant&
5820 return NextArgT::call(func, tvs - 1, args..., tvs);
5823 template<class Ret, size_t CurArg> struct NativeFuncCaller<Ret,0,CurArg> {
5824 template<class... Args>
5825 static Ret call(const Func* f, TypedValue*, Args... args) {
5826 typedef typename NativeFunction<Ret,Args...>::type FuncType;
5827 return reinterpret_cast<FuncType>(f->nativeFuncPtr())(args...);
5831 template<class Ret>
5832 static Ret makeNativeCall(const Func* f, TypedValue* args, size_t numArgs) {
5833 static_assert(kMaxBuiltinArgs == 5,
5834 "makeNativeCall needs updates for kMaxBuiltinArgs");
5835 switch (numArgs) {
5836 case 0: return NativeFuncCaller<Ret,0,0>::call(f, args);
5837 case 1: return NativeFuncCaller<Ret,1,0>::call(f, args);
5838 case 2: return NativeFuncCaller<Ret,2,0>::call(f, args);
5839 case 3: return NativeFuncCaller<Ret,3,0>::call(f, args);
5840 case 4: return NativeFuncCaller<Ret,4,0>::call(f, args);
5841 case 5: return NativeFuncCaller<Ret,5,0>::call(f, args);
5842 default: assert(false);
5844 not_reached();
5847 template<class Ret>
5848 static int makeNativeRefCall(const Func* f, Ret* ret,
5849 TypedValue* args, size_t numArgs) {
5850 switch (numArgs) {
5851 case 0: return NativeFuncCaller<int64_t,0,0>::call(f, args, ret);
5852 case 1: return NativeFuncCaller<int64_t,1,0>::call(f, args, ret);
5853 case 2: return NativeFuncCaller<int64_t,2,0>::call(f, args, ret);
5854 case 3: return NativeFuncCaller<int64_t,3,0>::call(f, args, ret);
5855 case 4: return NativeFuncCaller<int64_t,4,0>::call(f, args, ret);
5856 case 5: return NativeFuncCaller<int64_t,5,0>::call(f, args, ret);
5857 default: assert(false);
5859 not_reached();
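// NB (editorial sketch, not in the original source; the builtin, its
// parameter types, and the argument layout below are hypothetical):
// for a builtin f taking (String, int64) and returning bool,
// makeNativeCall<bool>(f, args, 2) unrolls roughly as
//
//   NativeFuncCaller<bool,2,0>::call(f, args)
//     // param 0 is a string: pass &args[0].m_data (aliases the String)
//   NativeFuncCaller<bool,1,1>::call(f, args - 1, &args[0].m_data)
//     // param 1 is an int: pass args[-1].m_data.num by value
//   NativeFuncCaller<bool,0,2>::call(f, args - 2,
//                                    &args[0].m_data, args[-1].m_data.num)
//     // base case: cast nativeFuncPtr() to bool (*)(Value*, int64_t) and call
//
// makeNativeRefCall differs only in that the return slot is prepended as the
// first argument of the pack, so the native function fills it in place.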
5862 inline void OPTBLD_INLINE VMExecutionContext::iopFCallBuiltin(PC& pc) {
5863 NEXT();
5864 DECODE_IVA(numArgs);
5865 DECODE_IVA(numNonDefault);
5866 DECODE(Id, id);
5867 const NamedEntity* ne = m_fp->m_func->unit()->lookupNamedEntityId(id);
5868 Func* func = Unit::lookupFunc(ne);
5869 if (func == nullptr) {
5870 raise_error("Undefined function: %s",
5871 m_fp->m_func->unit()->lookupLitstrId(id)->data());
5873 TypedValue* args = m_stack.indTV(numArgs-1);
5874 assert(numArgs == func->numParams());
5875 bool zendParamMode = func->info()->attribute & ClassInfo::ZendParamMode;
5876 TypedValue ret;
5878 for (int i = 0; i < numNonDefault; i++) {
5879 const Func::ParamInfo& pi = func->params()[i];
5881 if (zendParamMode) {
5882 #define CASE(kind) case KindOf ## kind : do { \
5883 if (!tvCoerceParamTo ## kind ## InPlace(&args[-i])) { \
5884 ret.m_type = KindOfNull; \
5885 goto free_frame; \
5886 } \
5887 break; \
5888 } while (0); break;
5889 switch (pi.builtinType()) {
5890 CASE(Boolean)
5891 CASE(Int64)
5892 CASE(Double)
5893 CASE(String)
5894 CASE(Array)
5895 CASE(Object)
5896 case KindOfUnknown:
5897 break;
5898 default:
5899 not_reached();
5901 #undef CASE
5903 } else {
5905 #define CASE(kind) case KindOf ## kind : do { \
5906 tvCastTo ## kind ## InPlace(&args[-i]); break; \
5907 } while (0); break;
5908 switch (pi.builtinType()) {
5909 CASE(Boolean)
5910 CASE(Int64)
5911 CASE(Double)
5912 CASE(String)
5913 CASE(Array)
5914 CASE(Object)
5915 case KindOfUnknown:
5916 break;
5917 default:
5918 not_reached();
5920 #undef CASE
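// NB (editorial example, not in the original source; the builtin and the
// argument value are hypothetical): the two coercion paths above treat a
// mistyped argument differently. For a parameter whose builtinType() is
// KindOfInt64 receiving the string "abc":
//
//   zendParamMode:  tvCoerceParamToInt64InPlace(&args[0]) fails, so ret is
//                   set to KindOfNull and control jumps to free_frame --
//                   the builtin is never invoked.
//   default mode:   tvCastToInt64InPlace(&args[0]) forces a value (0 here)
//                   and the native call proceeds normally.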
5925 ret.m_type = func->returnType();
5926 switch (func->returnType()) {
5927 case KindOfBoolean:
5928 ret.m_data.num = makeNativeCall<bool>(func, args, numArgs);
5929 break;
5930 case KindOfNull: /* void return type */
5931 case KindOfInt64:
5932 ret.m_data.num = makeNativeCall<int64_t>(func, args, numArgs);
5933 break;
5934 case KindOfString:
5935 case KindOfStaticString:
5936 case KindOfArray:
5937 case KindOfObject:
5938 makeNativeRefCall(func, &ret.m_data, args, numArgs);
5939 if (ret.m_data.num == 0) {
5940 ret.m_type = KindOfNull;
5942 break;
5943 case KindOfUnknown:
5944 makeNativeRefCall(func, &ret, args, numArgs);
5945 if (ret.m_type == KindOfUninit) {
5946 ret.m_type = KindOfNull;
5948 break;
5949 default:
5950 not_reached();
5953 free_frame:
5954 frame_free_args(args, numNonDefault);
5955 m_stack.ndiscard(numArgs);
5956 memcpy(m_stack.allocTV(), &ret, sizeof(TypedValue));
5959 bool VMExecutionContext::prepareArrayArgs(ActRec* ar,
5960 ArrayData* args) {
5961 if (UNLIKELY(ar->hasInvName())) {
5962 m_stack.pushStringNoRc(ar->getInvName());
5963 m_stack.pushArray(args);
5964 ar->setVarEnv(0);
5965 ar->initNumArgs(2);
5966 return true;
5969 int nargs = args->size();
5970 const Func* f = ar->m_func;
5971 int nparams = f->numParams();
5972 int extra = nargs - nparams;
5973 if (extra < 0) {
5974 extra = 0;
5975 nparams = nargs;
5977 ssize_t pos = args->iter_begin();
5978 for (int i = 0; i < nparams; ++i) {
5979 TypedValue* from = const_cast<TypedValue*>(
5980 args->getValueRef(pos).asTypedValue());
5981 TypedValue* to = m_stack.allocTV();
5982 if (UNLIKELY(f->byRef(i))) {
5983 if (UNLIKELY(!tvAsVariant(from).isReferenced())) {
5984 raise_warning("Parameter %d to %s() expected to be a reference, "
5985 "value given", i + 1, f->fullName()->data());
5986 if (skipCufOnInvalidParams) {
5987 m_stack.discard();
5988 while (i--) m_stack.popTV();
5989 m_stack.popAR();
5990 m_stack.pushNull();
5991 return false;
5994 tvDup(*from, *to);
5995 } else {
5996 tvDup(*from, *to);
5997 if (UNLIKELY(to->m_type == KindOfRef)) {
5998 tvUnbox(to);
6001 pos = args->iter_advance(pos);
6003 if (extra && (ar->m_func->attrs() & AttrMayUseVV)) {
6004 ExtraArgs* extraArgs = ExtraArgs::allocateUninit(extra);
6005 for (int i = 0; i < extra; ++i) {
6006 TypedValue* to = extraArgs->getExtraArg(i);
6007 tvDup(*args->getValueRef(pos).asTypedValue(), *to);
6008 if (to->m_type == KindOfRef && to->m_data.pref->_count == 2) {
6009 tvUnbox(to);
6011 pos = args->iter_advance(pos);
6013 ar->setExtraArgs(extraArgs);
6014 ar->initNumArgs(nargs);
6015 } else {
6016 ar->initNumArgs(nparams);
6019 return true;
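// NB (editorial example, not in the original source; the counts are made up):
// for a callee declaring 2 parameters invoked through an array of 4 values,
// the loop above copies the first 2 values onto the stack (with the by-ref
// checks shown); the remaining 2 either go into an ExtraArgs block with
// numArgs = 4 (callee has AttrMayUseVV) or are dropped with numArgs = 2.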
6022 static void cleanupParamsAndActRec(Stack& stack,
6023 ActRec* ar,
6024 ExtraArgs* extraArgs) {
6025 assert(stack.top() + (extraArgs ?
6026 ar->m_func->numParams() :
6027 ar->numArgs()) == (void*)ar);
6028 if (extraArgs) {
6029 const int numExtra = ar->numArgs() - ar->m_func->numParams();
6030 ExtraArgs::deallocate(extraArgs, numExtra);
6032 while (stack.top() != (void*)ar) {
6033 stack.popTV();
6035 stack.popAR();
6038 bool VMExecutionContext::doFCallArray(PC& pc) {
6039 ActRec* ar = (ActRec*)(m_stack.top() + 1);
6040 assert(ar->numArgs() == 1);
6042 Cell* c1 = m_stack.topC();
6043 if (skipCufOnInvalidParams && UNLIKELY(c1->m_type != KindOfArray)) {
6044 // task #1756122
6045 // this is what we /should/ do, but our code base depends
6046 // on the broken behavior of casting the second arg to an
6047 // array.
6048 cleanupParamsAndActRec(m_stack, ar, nullptr);
6049 m_stack.pushNull();
6050 raise_warning("call_user_func_array() expects parameter 2 to be array");
6051 return false;
6054 const Func* func = ar->m_func;
6056 Array args(LIKELY(c1->m_type == KindOfArray) ? c1->m_data.parr :
6057 tvAsVariant(c1).toArray().get());
6058 m_stack.popTV();
6059 checkStack(m_stack, func);
6061 assert(ar->m_savedRbp == (uint64_t)m_fp);
6062 assert(!ar->m_func->isGenerator());
6063 ar->m_savedRip =
6064 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedFrame());
6065 assert(isReturnHelper(ar->m_savedRip));
6066 TRACE(3, "FCallArray: pc %p func %p base %d\n", m_pc,
6067 m_fp->m_func->unit()->entry(),
6068 int(m_fp->m_func->base()));
6069 ar->m_soff = m_fp->m_func->unit()->offsetOf(pc)
6070 - (uintptr_t)m_fp->m_func->base();
6071 assert(pcOff() > m_fp->m_func->base());
6073 if (UNLIKELY(!prepareArrayArgs(ar, args.get()))) return false;
6076 if (UNLIKELY(!(prepareFuncEntry(ar, pc)))) {
6077 return false;
6079 SYNC();
6080 if (UNLIKELY(!EventHook::FunctionEnter(ar, EventHook::NormalFunc))) {
6081 pc = m_pc;
6082 return false;
6084 return true;
6087 inline void OPTBLD_INLINE VMExecutionContext::iopFCallArray(PC& pc) {
6088 NEXT();
6089 (void)doFCallArray(pc);
6092 inline void OPTBLD_INLINE VMExecutionContext::iopCufSafeArray(PC& pc) {
6093 NEXT();
6094 Array ret;
6095 ret.append(tvAsVariant(m_stack.top() + 1));
6096 ret.appendWithRef(tvAsVariant(m_stack.top() + 0));
6097 m_stack.popTV();
6098 m_stack.popTV();
6099 tvAsVariant(m_stack.top()) = ret;
6102 inline void OPTBLD_INLINE VMExecutionContext::iopCufSafeReturn(PC& pc) {
6103 NEXT();
6104 bool ok = tvAsVariant(m_stack.top() + 1).toBoolean();
6105 tvRefcountedDecRef(m_stack.top() + 1);
6106 tvRefcountedDecRef(m_stack.top() + (ok ? 2 : 0));
6107 if (ok) m_stack.top()[2] = m_stack.top()[0];
6108 m_stack.ndiscard(2);
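// NB (editorial stack diagram, not in the original source; "result" and
// "default" are descriptive labels only). With the top of the stack leftmost:
//
//   before:  [ result, flag, default ]
//   after:   [ result ]     when flag converts to true
//            [ default ]    when flag converts to false
//
// The flag and the losing value are decref'd above; ndiscard(2) then drops
// the two dead cells, leaving the surviving value as the new top.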
6111 inline bool VMExecutionContext::initIterator(PC& pc, PC& origPc, Iter* it,
6112 Offset offset, Cell* c1) {
6113 bool hasElems = it->init(c1);
6114 if (!hasElems) {
6115 ITER_SKIP(offset);
6117 m_stack.popC();
6118 return hasElems;
6121 inline void OPTBLD_INLINE VMExecutionContext::iopIterInit(PC& pc) {
6122 PC origPc = pc;
6123 NEXT();
6124 DECODE_IA(itId);
6125 DECODE(Offset, offset);
6126 DECODE_HA(val);
6127 Cell* c1 = m_stack.topC();
6128 Iter* it = frame_iter(m_fp, itId);
6129 TypedValue* tv1 = frame_local(m_fp, val);
6130 if (initIterator(pc, origPc, it, offset, c1)) {
6131 tvAsVariant(tv1) = it->arr().second();
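// NB (editorial bytecode sketch, not in the original source; the iterator
// id, labels and local are hypothetical): the Init/Next pairs in this
// section implement foreach-style loops, e.g.
//
//   IterInit 0 <exit> L0    ; pops the iterable; if it is empty, ITER_SKIP
//                           ; jumps to <exit>, else L0 gets the first value
//   <body>:
//     ...loop body...
//   IterNext 0 <body> L0    ; advances; if elements remain, jumps back to
//                           ; <body> with L0 set to the next value
//   <exit>: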
6135 inline void OPTBLD_INLINE VMExecutionContext::iopIterInitK(PC& pc) {
6136 PC origPc = pc;
6137 NEXT();
6138 DECODE_IA(itId);
6139 DECODE(Offset, offset);
6140 DECODE_HA(val);
6141 DECODE_HA(key);
6142 Cell* c1 = m_stack.topC();
6143 Iter* it = frame_iter(m_fp, itId);
6144 TypedValue* tv1 = frame_local(m_fp, val);
6145 TypedValue* tv2 = frame_local(m_fp, key);
6146 if (initIterator(pc, origPc, it, offset, c1)) {
6147 tvAsVariant(tv1) = it->arr().second();
6148 tvAsVariant(tv2) = it->arr().first();
6152 inline void OPTBLD_INLINE VMExecutionContext::iopWIterInit(PC& pc) {
6153 PC origPc = pc;
6154 NEXT();
6155 DECODE_IA(itId);
6156 DECODE(Offset, offset);
6157 DECODE_HA(val);
6158 Cell* c1 = m_stack.topC();
6159 Iter* it = frame_iter(m_fp, itId);
6160 TypedValue* tv1 = frame_local(m_fp, val);
6161 if (initIterator(pc, origPc, it, offset, c1)) {
6162 tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6166 inline void OPTBLD_INLINE VMExecutionContext::iopWIterInitK(PC& pc) {
6167 PC origPc = pc;
6168 NEXT();
6169 DECODE_IA(itId);
6170 DECODE(Offset, offset);
6171 DECODE_HA(val);
6172 DECODE_HA(key);
6173 Cell* c1 = m_stack.topC();
6174 Iter* it = frame_iter(m_fp, itId);
6175 TypedValue* tv1 = frame_local(m_fp, val);
6176 TypedValue* tv2 = frame_local(m_fp, key);
6177 if (initIterator(pc, origPc, it, offset, c1)) {
6178 tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6179 tvAsVariant(tv2) = it->arr().first();
6184 inline bool VMExecutionContext::initIteratorM(PC& pc, PC& origPc, Iter* it,
6185 Offset offset, Var* v1,
6186 TypedValue *val,
6187 TypedValue *key) {
6188 bool hasElems = false;
6189 TypedValue* rtv = v1->m_data.pref->tv();
6190 if (rtv->m_type == KindOfArray) {
6191 hasElems = new_miter_array_key(it, v1->m_data.pref, val, key);
6192 } else if (rtv->m_type == KindOfObject) {
6193 Class* ctx = arGetContextClass(g_vmContext->getFP());
6194 hasElems = new_miter_object(it, v1->m_data.pref, ctx, val, key);
6195 } else {
6196 hasElems = new_miter_other(it, v1->m_data.pref);
6199 if (!hasElems) {
6200 ITER_SKIP(offset);
6203 m_stack.popV();
6204 return hasElems;
6207 inline void OPTBLD_INLINE VMExecutionContext::iopMIterInit(PC& pc) {
6208 PC origPc = pc;
6209 NEXT();
6210 DECODE_IA(itId);
6211 DECODE(Offset, offset);
6212 DECODE_HA(val);
6213 Var* v1 = m_stack.topV();
6214 assert(v1->m_type == KindOfRef);
6215 Iter* it = frame_iter(m_fp, itId);
6216 TypedValue* tv1 = frame_local(m_fp, val);
6217 initIteratorM(pc, origPc, it, offset, v1, tv1, nullptr);
6220 inline void OPTBLD_INLINE VMExecutionContext::iopMIterInitK(PC& pc) {
6221 PC origPc = pc;
6222 NEXT();
6223 DECODE_IA(itId);
6224 DECODE(Offset, offset);
6225 DECODE_HA(val);
6226 DECODE_HA(key);
6227 Var* v1 = m_stack.topV();
6228 assert(v1->m_type == KindOfRef);
6229 Iter* it = frame_iter(m_fp, itId);
6230 TypedValue* tv1 = frame_local(m_fp, val);
6231 TypedValue* tv2 = frame_local(m_fp, key);
6232 initIteratorM(pc, origPc, it, offset, v1, tv1, tv2);
6235 inline void OPTBLD_INLINE VMExecutionContext::iopIterNext(PC& pc) {
6236 PC origPc = pc;
6237 NEXT();
6238 DECODE_IA(itId);
6239 DECODE(Offset, offset);
6240 DECODE_HA(val);
6241 Iter* it = frame_iter(m_fp, itId);
6242 TypedValue* tv1 = frame_local(m_fp, val);
6243 if (it->next()) {
6244 ITER_SKIP(offset);
6245 tvAsVariant(tv1) = it->arr().second();
6249 inline void OPTBLD_INLINE VMExecutionContext::iopIterNextK(PC& pc) {
6250 PC origPc = pc;
6251 NEXT();
6252 DECODE_IA(itId);
6253 DECODE(Offset, offset);
6254 DECODE_HA(val);
6255 DECODE_HA(key);
6256 Iter* it = frame_iter(m_fp, itId);
6257 TypedValue* tv1 = frame_local(m_fp, val);
6258 TypedValue* tv2 = frame_local(m_fp, key);
6259 if (it->next()) {
6260 ITER_SKIP(offset);
6261 tvAsVariant(tv1) = it->arr().second();
6262 tvAsVariant(tv2) = it->arr().first();
6266 inline void OPTBLD_INLINE VMExecutionContext::iopWIterNext(PC& pc) {
6267 PC origPc = pc;
6268 NEXT();
6269 DECODE_IA(itId);
6270 DECODE(Offset, offset);
6271 DECODE_HA(val);
6272 Iter* it = frame_iter(m_fp, itId);
6273 TypedValue* tv1 = frame_local(m_fp, val);
6274 if (it->next()) {
6275 ITER_SKIP(offset);
6276 tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6280 inline void OPTBLD_INLINE VMExecutionContext::iopWIterNextK(PC& pc) {
6281 PC origPc = pc;
6282 NEXT();
6283 DECODE_IA(itId);
6284 DECODE(Offset, offset);
6285 DECODE_HA(val);
6286 DECODE_HA(key);
6287 Iter* it = frame_iter(m_fp, itId);
6288 TypedValue* tv1 = frame_local(m_fp, val);
6289 TypedValue* tv2 = frame_local(m_fp, key);
6290 if (it->next()) {
6291 ITER_SKIP(offset);
6292 tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6293 tvAsVariant(tv2) = it->arr().first();
6297 inline void OPTBLD_INLINE VMExecutionContext::iopMIterNext(PC& pc) {
6298 PC origPc = pc;
6299 NEXT();
6300 DECODE_IA(itId);
6301 DECODE(Offset, offset);
6302 DECODE_HA(val);
6303 Iter* it = frame_iter(m_fp, itId);
6304 TypedValue* tv1 = frame_local(m_fp, val);
6305 if (miter_next_key(it, tv1, nullptr)) {
6306 ITER_SKIP(offset);
6310 inline void OPTBLD_INLINE VMExecutionContext::iopMIterNextK(PC& pc) {
6311 PC origPc = pc;
6312 NEXT();
6313 DECODE_IA(itId);
6314 DECODE(Offset, offset);
6315 DECODE_HA(val);
6316 DECODE_HA(key);
6317 Iter* it = frame_iter(m_fp, itId);
6318 TypedValue* tv1 = frame_local(m_fp, val);
6319 TypedValue* tv2 = frame_local(m_fp, key);
6320 if (miter_next_key(it, tv1, tv2)) {
6321 ITER_SKIP(offset);
6325 inline void OPTBLD_INLINE VMExecutionContext::iopIterFree(PC& pc) {
6326 NEXT();
6327 DECODE_IA(itId);
6328 Iter* it = frame_iter(m_fp, itId);
6329 it->free();
6332 inline void OPTBLD_INLINE VMExecutionContext::iopMIterFree(PC& pc) {
6333 NEXT();
6334 DECODE_IA(itId);
6335 Iter* it = frame_iter(m_fp, itId);
6336 it->mfree();
6339 inline void OPTBLD_INLINE VMExecutionContext::iopCIterFree(PC& pc) {
6340 NEXT();
6341 DECODE_IA(itId);
6342 Iter* it = frame_iter(m_fp, itId);
6343 it->cfree();
6346 inline void OPTBLD_INLINE inclOp(VMExecutionContext *ec, PC &pc,
6347 InclOpFlags flags) {
6348 NEXT();
6349 Cell* c1 = ec->m_stack.topC();
6350 String path(prepareKey(c1));
6351 bool initial;
6352 TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
6353 flags & InclOpOnce ? "Once" : "",
6354 flags & InclOpDocRoot ? "DocRoot" : "",
6355 flags & InclOpRelative ? "Relative" : "",
6356 flags & InclOpFatal ? "Fatal" : "",
6357 path->data());
6359 Unit* u = flags & (InclOpDocRoot|InclOpRelative) ?
6360 ec->evalIncludeRoot(path.get(), flags, &initial) :
6361 ec->evalInclude(path.get(), ec->m_fp->m_func->unit()->filepath(), &initial);
6362 ec->m_stack.popC();
6363 if (u == nullptr) {
6364 ((flags & InclOpFatal) ?
6365 (void (*)(const char *, ...))raise_error :
6366 (void (*)(const char *, ...))raise_warning)("File not found: %s",
6367 path->data());
6368 ec->m_stack.pushFalse();
6369 } else {
6370 if (!(flags & InclOpOnce) || initial) {
6371 ec->evalUnit(u, pc, EventHook::PseudoMain);
6372 } else {
6373 Stats::inc(Stats::PseudoMain_Guarded);
6374 ec->m_stack.pushTrue();
6379 inline void OPTBLD_INLINE VMExecutionContext::iopIncl(PC& pc) {
6380 inclOp(this, pc, InclOpDefault);
6383 inline void OPTBLD_INLINE VMExecutionContext::iopInclOnce(PC& pc) {
6384 inclOp(this, pc, InclOpOnce);
6387 inline void OPTBLD_INLINE VMExecutionContext::iopReq(PC& pc) {
6388 inclOp(this, pc, InclOpFatal);
6391 inline void OPTBLD_INLINE VMExecutionContext::iopReqOnce(PC& pc) {
6392 inclOp(this, pc, InclOpFatal | InclOpOnce);
6395 inline void OPTBLD_INLINE VMExecutionContext::iopReqDoc(PC& pc) {
6396 inclOp(this, pc, InclOpFatal | InclOpOnce | InclOpDocRoot);
6399 inline void OPTBLD_INLINE VMExecutionContext::iopEval(PC& pc) {
6400 NEXT();
6401 Cell* c1 = m_stack.topC();
6402 String code(prepareKey(c1));
6403 String prefixedCode = concat("<?php ", code);
6404 Unit* unit = compileEvalString(prefixedCode.get());
6405 if (unit == nullptr) {
6406 raise_error("Syntax error in eval()");
6408 m_stack.popC();
6409 evalUnit(unit, pc, EventHook::Eval);
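// NB (editorial example, not in the original source): eval("return $x + 1;")
// compiles the string "<?php return $x + 1;" into a one-off Unit via
// compileEvalString() and runs it with evalUnit(); if the string does not
// parse, the "Syntax error in eval()" error above is raised instead.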
6412 inline void OPTBLD_INLINE VMExecutionContext::iopDefFunc(PC& pc) {
6413 NEXT();
6414 DECODE_IVA(fid);
6415 Func* f = m_fp->m_func->unit()->lookupFuncId(fid);
6416 f->setCached();
6419 inline void OPTBLD_INLINE VMExecutionContext::iopDefCls(PC& pc) {
6420 NEXT();
6421 DECODE_IVA(cid);
6422 PreClass* c = m_fp->m_func->unit()->lookupPreClassId(cid);
6423 Unit::defClass(c);
6426 inline void OPTBLD_INLINE VMExecutionContext::iopDefTypedef(PC& pc) {
6427 NEXT();
6428 DECODE_IVA(tid);
6429 m_fp->m_func->unit()->defTypedef(tid);
6432 static inline void checkThis(ActRec* fp) {
6433 if (!fp->hasThis()) {
6434 raise_error(Strings::FATAL_NULL_THIS);
6438 inline void OPTBLD_INLINE VMExecutionContext::iopThis(PC& pc) {
6439 NEXT();
6440 checkThis(m_fp);
6441 ObjectData* this_ = m_fp->getThis();
6442 m_stack.pushObject(this_);
6445 inline void OPTBLD_INLINE VMExecutionContext::iopBareThis(PC& pc) {
6446 NEXT();
6447 DECODE(unsigned char, notice);
6448 if (m_fp->hasThis()) {
6449 ObjectData* this_ = m_fp->getThis();
6450 m_stack.pushObject(this_);
6451 } else {
6452 m_stack.pushNull();
6453 if (notice) raise_notice(Strings::WARN_NULL_THIS);
6457 inline void OPTBLD_INLINE VMExecutionContext::iopCheckThis(PC& pc) {
6458 NEXT();
6459 checkThis(m_fp);
6462 inline void OPTBLD_INLINE VMExecutionContext::iopInitThisLoc(PC& pc) {
6463 NEXT();
6464 DECODE_IVA(id);
6465 TypedValue* thisLoc = frame_local(m_fp, id);
6466 tvRefcountedDecRef(thisLoc);
6467 if (m_fp->hasThis()) {
6468 thisLoc->m_data.pobj = m_fp->getThis();
6469 thisLoc->m_type = KindOfObject;
6470 tvIncRef(thisLoc);
6471 } else {
6472 tvWriteUninit(thisLoc);
6476 /*
6477 * Helper for StaticLoc and StaticLocInit.
6478 */
6479 static inline void
6480 lookupStatic(StringData* name,
6481 const ActRec* fp,
6482 TypedValue*&val, bool& inited) {
6483 HphpArray* map = get_static_locals(fp);
6484 assert(map != nullptr);
6485 val = map->nvGet(name);
6486 if (val == nullptr) {
6487 TypedValue tv;
6488 tvWriteUninit(&tv);
6489 map->set(name, tvAsCVarRef(&tv), false);
6490 val = map->nvGet(name);
6491 inited = false;
6492 } else {
6493 inited = true;
6497 inline void OPTBLD_INLINE VMExecutionContext::iopStaticLoc(PC& pc) {
6498 NEXT();
6499 DECODE_IVA(localId);
6500 DECODE_LITSTR(var);
6501 TypedValue* fr = nullptr;
6502 bool inited;
6503 lookupStatic(var, m_fp, fr, inited);
6504 assert(fr != nullptr);
6505 if (fr->m_type != KindOfRef) {
6506 assert(!inited);
6507 tvBox(fr);
6509 TypedValue* tvLocal = frame_local(m_fp, localId);
6510 tvBind(fr, tvLocal);
6511 if (inited) {
6512 m_stack.pushTrue();
6513 } else {
6514 m_stack.pushFalse();
6518 inline void OPTBLD_INLINE VMExecutionContext::iopStaticLocInit(PC& pc) {
6519 NEXT();
6520 DECODE_IVA(localId);
6521 DECODE_LITSTR(var);
6522 TypedValue* fr = nullptr;
6523 bool inited;
6524 lookupStatic(var, m_fp, fr, inited);
6525 assert(fr != nullptr);
6526 if (!inited) {
6527 Cell* initVal = m_stack.topC();
6528 tvDup(*initVal, *fr);
6530 if (fr->m_type != KindOfRef) {
6531 assert(!inited);
6532 tvBox(fr);
6534 TypedValue* tvLocal = frame_local(m_fp, localId);
6535 tvBind(fr, tvLocal);
6536 m_stack.discard();
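// NB (editorial walk-through, not in the original source; the variable name
// is hypothetical): for a function containing `static $counter = 0;`
//
//   first call:   lookupStatic("counter", ...) inserts an Uninit slot into
//                 get_static_locals(fp) and reports inited == false; the
//                 initializer (0) is copied into the slot, the slot is boxed
//                 into a RefData, and the local is bound to that RefData.
//   later calls:  inited == true, so the initializer on the stack is simply
//                 discarded and the local is re-bound to the surviving
//                 RefData, which is how the value persists across calls.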
6539 inline void OPTBLD_INLINE VMExecutionContext::iopCatch(PC& pc) {
6540 NEXT();
6541 assert(m_faults.size() > 0);
6542 Fault fault = m_faults.back();
6543 m_faults.pop_back();
6544 assert(fault.m_faultType == Fault::Type::UserException);
6545 m_stack.pushObjectNoRc(fault.m_userException);
6548 inline void OPTBLD_INLINE VMExecutionContext::iopLateBoundCls(PC& pc) {
6549 NEXT();
6550 Class* cls = frameStaticClass(m_fp);
6551 if (!cls) {
6552 raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
6554 m_stack.pushClass(cls);
6557 inline void OPTBLD_INLINE VMExecutionContext::iopVerifyParamType(PC& pc) {
6558 SYNC(); // We might need m_pc to be updated to throw.
6559 NEXT();
6561 DECODE_IVA(param);
6562 const Func *func = m_fp->m_func;
6563 assert(param < func->numParams());
6564 assert(func->numParams() == int(func->params().size()));
6565 const TypeConstraint& tc = func->params()[param].typeConstraint();
6566 assert(tc.hasConstraint() || !RuntimeOption::EvalCheckExtendedTypeHints);
6567 const TypedValue *tv = frame_local(m_fp, param);
6568 tc.verify(tv, func, param);
6571 inline void OPTBLD_INLINE VMExecutionContext::iopNativeImpl(PC& pc) {
6572 NEXT();
6573 uint soff = m_fp->m_soff;
6574 BuiltinFunction func = m_fp->m_func->builtinFuncPtr();
6575 assert(func);
6576 // Actually call the native implementation. This will handle freeing the
6577 // locals in the normal case. In the case of an exception, the VM unwinder
6578 // will take care of it.
6579 func(m_fp);
6580 // Adjust the stack; the native implementation put the return value in the
6581 // right place for us already
6582 m_stack.ndiscard(m_fp->m_func->numSlotsInFrame());
6583 ActRec* sfp = m_fp->arGetSfp();
6584 if (LIKELY(sfp != m_fp)) {
6585 // Restore caller's execution state.
6586 m_fp = sfp;
6587 pc = m_fp->m_func->unit()->entry() + m_fp->m_func->base() + soff;
6588 m_stack.ret();
6589 } else {
6590 // No caller; terminate.
6591 m_stack.ret();
6592 #ifdef HPHP_TRACE
6594 std::ostringstream os;
6595 os << toStringElm(m_stack.topTV());
6596 ONTRACE(1,
6597 Trace::trace("Return %s from VMExecutionContext::dispatch("
6598 "%p)\n", os.str().c_str(), m_fp));
6600 #endif
6601 pc = 0;
6605 inline void OPTBLD_INLINE VMExecutionContext::iopHighInvalid(PC& pc) {
6606 fprintf(stderr, "invalid bytecode executed\n");
6607 abort();
6610 inline void OPTBLD_INLINE VMExecutionContext::iopSelf(PC& pc) {
6611 NEXT();
6612 Class* clss = arGetContextClass(m_fp);
6613 if (!clss) {
6614 raise_error(HPHP::Strings::CANT_ACCESS_SELF);
6616 m_stack.pushClass(clss);
6619 inline void OPTBLD_INLINE VMExecutionContext::iopParent(PC& pc) {
6620 NEXT();
6621 Class* clss = arGetContextClass(m_fp);
6622 if (!clss) {
6623 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
6625 Class* parent = clss->parent();
6626 if (!parent) {
6627 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
6629 m_stack.pushClass(parent);
6632 inline void OPTBLD_INLINE VMExecutionContext::iopCreateCl(PC& pc) {
6633 NEXT();
6634 DECODE_IVA(numArgs);
6635 DECODE_LITSTR(clsName);
6636 Class* cls = Unit::loadClass(clsName);
6637 c_Closure* cl = static_cast<c_Closure*>(newInstance(cls));
6638 c_Closure* cl2 = cl->init(numArgs, m_fp, m_stack.top());
6639 m_stack.ndiscard(numArgs);
6640 assert(cl == cl2);
6641 m_stack.pushObject(cl2);
6644 static inline c_Continuation* createCont(const Func* origFunc,
6645 const Func* genFunc) {
6646 auto const cont = c_Continuation::alloc(origFunc, genFunc);
6647 cont->incRefCount();
6648 cont->setNoDestruct();
6650 // The ActRec corresponding to the generator body lives as long as the object
6651 // does. We set it up once, here, and then just change FP to point to it when
6652 // we enter the generator body.
6653 ActRec* ar = cont->actRec();
6654 ar->m_func = genFunc;
6655 ar->initNumArgs(1);
6656 ar->setVarEnv(nullptr);
6658 TypedValue* contLocal = frame_local(ar, 0);
6659 contLocal->m_type = KindOfObject;
6660 contLocal->m_data.pobj = cont;
6661 // Do not incref the continuation here! Doing so will create a reference
6662 // cycle, since this reference is a local in the continuation frame and thus
6663 // will be decreffed when the continuation is destroyed. The corresponding
6664 // non-decref is in ~c_Continuation.
6666 return cont;
6669 c_Continuation*
6670 VMExecutionContext::createContFunc(const Func* origFunc,
6671 const Func* genFunc) {
6672 auto cont = createCont(origFunc, genFunc);
6673 cont->actRec()->setThis(nullptr);
6674 return cont;
6677 c_Continuation*
6678 VMExecutionContext::createContMeth(const Func* origFunc,
6679 const Func* genFunc,
6680 void* objOrCls) {
6681 if (origFunc->isClosureBody()) {
6682 genFunc = genFunc->cloneAndSetClass(origFunc->cls());
6685 auto cont = createCont(origFunc, genFunc);
6686 auto ar = cont->actRec();
6687 ar->setThisOrClass(objOrCls);
6688 if (ar->hasThis()) {
6689 ar->getThis()->incRefCount();
6691 return cont;
6694 static inline void setContVar(const Func* genFunc,
6695 const StringData* name,
6696 TypedValue* src,
6697 c_Continuation* cont) {
6698 Id destId = genFunc->lookupVarId(name);
6699 if (destId != kInvalidId) {
6700 // Copy the value of the local to the cont object and set the
6701 // local to uninit so that we don't need to change refcounts.
6702 tvCopy(*src, *frame_local(cont->actRec(), destId));
6703 tvWriteUninit(src);
6704 } else {
6705 ActRec *contFP = cont->actRec();
6706 if (!contFP->hasVarEnv()) {
6707 // This VarEnv is created on the heap because it has to exist
6708 // independent of the current chain; i.e. we can't stack-allocate it. We
6709 // link it into the chain in UnpackCont, and take it out in ContSuspend.
6710 contFP->setVarEnv(VarEnv::createLocalOnHeap(contFP));
6712 contFP->getVarEnv()->setWithRef(name, src);
6716 static const StaticString s_this("this");
6718 c_Continuation*
6719 VMExecutionContext::fillContinuationVars(ActRec* fp,
6720 const Func* origFunc,
6721 const Func* genFunc,
6722 c_Continuation* cont) {
6723 // For functions that contain only named locals, the variable
6724 // environment is saved and restored by teleporting the values (and
6725 // their references) between the evaluation stack and the local
6726 // space at the end of the object using memcpy. Any variables in a
6727 // VarEnv are saved and restored from m_vars as usual.
6728 static const StringData* thisStr = s_this.get();
6729 bool skipThis;
6730 if (fp->hasVarEnv()) {
6731 Stats::inc(Stats::Cont_CreateVerySlow);
6732 Array definedVariables = fp->getVarEnv()->getDefinedVariables();
6733 skipThis = definedVariables.exists(s_this, true);
6735 for (ArrayIter iter(definedVariables); !iter.end(); iter.next()) {
6736 setContVar(genFunc, iter.first().getStringData(),
6737 const_cast<TypedValue*>(iter.secondRef().asTypedValue()), cont);
6739 } else {
6740 skipThis = origFunc->lookupVarId(thisStr) != kInvalidId;
6741 for (Id i = 0; i < origFunc->numNamedLocals(); ++i) {
6742 setContVar(genFunc, origFunc->localVarName(i),
6743 frame_local(fp, i), cont);
6747 // If $this is used as a local inside the body and is not provided
6748 // by our containing environment, just prefill it here instead of
6749 // using InitThisLoc inside the body
6750 if (!skipThis && fp->hasThis()) {
6751 Id id = genFunc->lookupVarId(thisStr);
6752 if (id != kInvalidId) {
6753 tvAsVariant(frame_local(cont->actRec(), id)) = fp->getThis();
6756 return cont;
6759 inline void OPTBLD_INLINE VMExecutionContext::iopCreateCont(PC& pc) {
6760 NEXT();
6761 DECODE_LITSTR(genName);
6763 const Func* origFunc = m_fp->m_func;
6764 const Func* genFunc = origFunc->getGeneratorBody(genName);
6765 assert(genFunc != nullptr);
6767 c_Continuation* cont = origFunc->isMethod()
6768 ? createContMeth(origFunc, genFunc, m_fp->getThisOrClass())
6769 : createContFunc(origFunc, genFunc);
6771 fillContinuationVars(m_fp, origFunc, genFunc, cont);
6773 TypedValue* ret = m_stack.allocTV();
6774 ret->m_type = KindOfObject;
6775 ret->m_data.pobj = cont;
6778 static inline c_Continuation* frame_continuation(ActRec* fp) {
6779 ObjectData* obj = frame_local(fp, 0)->m_data.pobj;
6780 assert(dynamic_cast<c_Continuation*>(obj));
6781 return static_cast<c_Continuation*>(obj);
6784 static inline c_Continuation* this_continuation(ActRec* fp) {
6785 ObjectData* obj = fp->getThis();
6786 assert(dynamic_cast<c_Continuation*>(obj));
6787 return static_cast<c_Continuation*>(obj);
6790 void VMExecutionContext::iopContEnter(PC& pc) {
6791 NEXT();
6793 // The stack must have one cell! Or else generatorStackBase() won't work!
6794 assert(m_stack.top() + 1 ==
6795 (TypedValue*)m_fp - m_fp->m_func->numSlotsInFrame());
6797 // Do linkage of the continuation's AR.
6798 assert(m_fp->hasThis());
6799 c_Continuation* cont = this_continuation(m_fp);
6800 ActRec* contAR = cont->actRec();
6801 arSetSfp(contAR, m_fp);
6803 contAR->m_soff = m_fp->m_func->unit()->offsetOf(pc)
6804 - (uintptr_t)m_fp->m_func->base();
6805 contAR->m_savedRip =
6806 reinterpret_cast<uintptr_t>(tx()->getRetFromInterpretedGeneratorFrame());
6807 assert(isReturnHelper(contAR->m_savedRip));
6809 m_fp = contAR;
6810 pc = contAR->m_func->getEntry();
6811 SYNC();
6813 if (UNLIKELY(!EventHook::FunctionEnter(contAR, EventHook::NormalFunc))) {
6814 pc = m_pc;
6818 inline void OPTBLD_INLINE VMExecutionContext::iopUnpackCont(PC& pc) {
6819 NEXT();
6820 c_Continuation* cont = frame_continuation(m_fp);
6822 // check sanity of received value
6823 assert(tvIsPlausible(m_stack.topC()));
6825 // Return the label in a stack cell
6826 TypedValue* label = m_stack.allocTV();
6827 label->m_type = KindOfInt64;
6828 label->m_data.num = cont->m_label;
6831 inline void OPTBLD_INLINE VMExecutionContext::iopContSuspend(PC& pc) {
6832 NEXT();
6833 DECODE_IVA(label);
6834 c_Continuation* cont = frame_continuation(m_fp);
6836 cont->c_Continuation::t_update(label, tvAsCVarRef(m_stack.topTV()));
6837 m_stack.popTV();
6839 EventHook::FunctionExit(m_fp);
6840 ActRec* prevFp = m_fp->arGetSfp();
6841 pc = prevFp->m_func->getEntry() + m_fp->m_soff;
6842 m_fp = prevFp;
6845 inline void OPTBLD_INLINE VMExecutionContext::iopContSuspendK(PC& pc) {
6846 NEXT();
6847 DECODE_IVA(label);
6848 c_Continuation* cont = frame_continuation(m_fp);
6850 TypedValue* val = m_stack.topTV();
6851 m_stack.popTV();
6852 cont->c_Continuation::t_update_key(label, tvAsCVarRef(m_stack.topTV()),
6853 tvAsCVarRef(val));
6854 m_stack.popTV();
6856 EventHook::FunctionExit(m_fp);
6857 ActRec* prevFp = m_fp->arGetSfp();
6858 pc = prevFp->m_func->getEntry() + m_fp->m_soff;
6859 m_fp = prevFp;
6862 inline void OPTBLD_INLINE VMExecutionContext::iopContRetC(PC& pc) {
6863 NEXT();
6864 c_Continuation* cont = frame_continuation(m_fp);
6865 cont->setDone();
6866 tvSetIgnoreRef(*m_stack.topC(), *cont->m_value.asTypedValue());
6867 m_stack.popC();
6869 EventHook::FunctionExit(m_fp);
6870 ActRec* prevFp = m_fp->arGetSfp();
6871 pc = prevFp->m_func->getEntry() + m_fp->m_soff;
6872 m_fp = prevFp;
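// NB (editorial control-flow sketch, not in the original source): one
// resume/suspend round trip of a continuation, in terms of the opcodes above:
//
//   ContEnter    links cont->actRec() under the caller frame, records the
//                return offset and return helper RIP, and jumps to the
//                generator body's entry.
//   UnpackCont   (inside the body) pushes cont->m_label so the body can
//                branch back to the point just after the previous yield.
//   ContSuspend  on the next yield, records the new label and the yielded
//                value via t_update(), then returns to the caller frame.
//   ContRetC     when the body returns, marks the continuation done and
//                stores the final value in cont->m_value.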
6875 inline void OPTBLD_INLINE VMExecutionContext::iopContCheck(PC& pc) {
6876 NEXT();
6877 DECODE_IVA(check_started);
6878 c_Continuation* cont = this_continuation(m_fp);
6879 if (check_started) {
6880 cont->startedCheck();
6882 cont->preNext();
6885 inline void OPTBLD_INLINE VMExecutionContext::iopContRaise(PC& pc) {
6886 NEXT();
6887 c_Continuation* cont = this_continuation(m_fp);
6888 assert(cont->m_label);
6889 --cont->m_label;
6892 inline void OPTBLD_INLINE VMExecutionContext::iopContValid(PC& pc) {
6893 NEXT();
6894 TypedValue* tv = m_stack.allocTV();
6895 tvWriteUninit(tv);
6896 tvAsVariant(tv) = !this_continuation(m_fp)->done();
6899 inline void OPTBLD_INLINE VMExecutionContext::iopContKey(PC& pc) {
6900 NEXT();
6901 c_Continuation* cont = this_continuation(m_fp);
6902 cont->startedCheck();
6904 TypedValue* tv = m_stack.allocTV();
6905 tvWriteUninit(tv);
6906 tvAsVariant(tv) = cont->m_key;
6909 inline void OPTBLD_INLINE VMExecutionContext::iopContCurrent(PC& pc) {
6910 NEXT();
6911 c_Continuation* cont = this_continuation(m_fp);
6912 cont->startedCheck();
6914 TypedValue* tv = m_stack.allocTV();
6915 tvWriteUninit(tv);
6916 tvAsVariant(tv) = cont->m_value;
6919 inline void OPTBLD_INLINE VMExecutionContext::iopContStopped(PC& pc) {
6920 NEXT();
6921 this_continuation(m_fp)->setStopped();
6924 inline void OPTBLD_INLINE VMExecutionContext::iopContHandle(PC& pc) {
6925 NEXT();
6926 c_Continuation* cont = this_continuation(m_fp);
6927 cont->setDone();
6928 cont->m_value.setNull();
6930 Variant exn = tvAsVariant(m_stack.topTV());
6931 m_stack.popC();
6932 assert(exn.asObjRef().instanceof(SystemLib::s_ExceptionClass));
6933 throw exn.asObjRef();
6936 inline void OPTBLD_INLINE VMExecutionContext::iopStrlen(PC& pc) {
6937 NEXT();
6938 TypedValue* subj = m_stack.topTV();
6939 if (LIKELY(IS_STRING_TYPE(subj->m_type))) {
6940 int64_t ans = subj->m_data.pstr->size();
6941 tvRefcountedDecRef(subj);
6942 subj->m_type = KindOfInt64;
6943 subj->m_data.num = ans;
6944 } else {
6945 Variant ans = f_strlen(tvAsVariant(subj));
6946 tvAsVariant(subj) = ans;
6950 inline void OPTBLD_INLINE VMExecutionContext::iopIncStat(PC& pc) {
6951 NEXT();
6952 DECODE_IVA(counter);
6953 DECODE_IVA(value);
6954 Stats::inc(Stats::StatCounter(counter), value);
6957 void VMExecutionContext::classExistsImpl(PC& pc, Attr typeAttr) {
6958 NEXT();
6959 TypedValue* aloadTV = m_stack.topTV();
6960 tvCastToBooleanInPlace(aloadTV);
6961 assert(aloadTV->m_type == KindOfBoolean);
6962 bool autoload = aloadTV->m_data.num;
6963 m_stack.popX();
6965 TypedValue* name = m_stack.topTV();
6966 tvCastToStringInPlace(name);
6967 assert(IS_STRING_TYPE(name->m_type));
6969 tvAsVariant(name) = Unit::classExists(name->m_data.pstr, autoload, typeAttr);
6972 inline void OPTBLD_INLINE VMExecutionContext::iopClassExists(PC& pc) {
6973 classExistsImpl(pc, AttrNone);
6976 inline void OPTBLD_INLINE VMExecutionContext::iopInterfaceExists(PC& pc) {
6977 classExistsImpl(pc, AttrInterface);
6980 inline void OPTBLD_INLINE VMExecutionContext::iopTraitExists(PC& pc) {
6981 classExistsImpl(pc, AttrTrait);
6984 string
6985 VMExecutionContext::prettyStack(const string& prefix) const {
6986 if (!getFP()) {
6987 string s("__Halted");
6988 return s;
6990 int offset = (m_fp->m_func->unit() != nullptr)
6991 ? pcOff()
6992 : 0;
6993 string begPrefix = prefix + "__";
6994 string midPrefix = prefix + "|| ";
6995 string endPrefix = prefix + "\\/";
6996 string stack = m_stack.toString(m_fp, offset, midPrefix);
6997 return begPrefix + "\n" + stack + endPrefix;
7000 void VMExecutionContext::checkRegStateWork() const {
7001 assert(Transl::tl_regState == Transl::VMRegState::CLEAN);
7004 void VMExecutionContext::DumpStack() {
7005 string s = g_vmContext->prettyStack("");
7006 fprintf(stderr, "%s\n", s.c_str());
7009 void VMExecutionContext::DumpCurUnit(int skip) {
7010 ActRec* fp = g_vmContext->getFP();
7011 Offset pc = fp->m_func->unit() ? g_vmContext->pcOff() : 0;
7012 while (skip--) {
7013 fp = g_vmContext->getPrevVMState(fp, &pc);
7015 if (fp == nullptr) {
7016 std::cout << "Don't have a valid fp\n";
7017 return;
7020 printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
7021 Unit* u = fp->m_func->unit();
7022 if (u == nullptr) {
7023 std::cout << "Current unit is NULL\n";
7024 return;
7026 printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
7027 std::cout << u->toString();
7030 void VMExecutionContext::PrintTCCallerInfo() {
7031 VMRegAnchor _;
7032 ActRec* fp = g_vmContext->getFP();
7033 Unit* u = fp->m_func->unit();
7034 fprintf(stderr, "Called from TC address %p\n",
7035 tx()->getTranslatedCaller());
7036 std::cerr << u->filepath()->data() << ':'
7037 << u->getLineNumber(u->offsetOf(g_vmContext->getPC())) << std::endl;
7040 static inline void
7041 condStackTraceSep(const char* pfx) {
7042 TRACE(3, "%s"
7043 "========================================"
7044 "========================================\n",
7045 pfx);
7048 #define COND_STACKTRACE(pfx) \
7049 ONTRACE(3, \
7050 string stack = prettyStack(pfx); \
7051 Trace::trace("%s\n", stack.c_str());)
7053 #define O(name, imm, push, pop, flags) \
7054 void VMExecutionContext::op##name() { \
7055 condStackTraceSep("op"#name" "); \
7056 COND_STACKTRACE("op"#name" pre: "); \
7057 PC pc = m_pc; \
7058 assert(toOp(*pc) == Op##name); \
7059 ONTRACE(1, \
7060 int offset = m_fp->m_func->unit()->offsetOf(pc); \
7061 Trace::trace("op"#name" offset: %d\n", offset)); \
7062 iop##name(pc); \
7063 SYNC(); \
7064 COND_STACKTRACE("op"#name" post: "); \
7065 condStackTraceSep("op"#name" "); \
7067 OPCODES
7068 #undef O
7069 #undef NEXT
7070 #undef DECODE_JMP
7071 #undef DECODE
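// NB (editorial expansion, not in the original source): for a concrete
// opcode such as Nop, the O() macro above generates roughly
//
//   void VMExecutionContext::opNop() {
//     condStackTraceSep("opNop ");
//     COND_STACKTRACE("opNop pre: ");
//     PC pc = m_pc;
//     assert(toOp(*pc) == OpNop);
//     ONTRACE(1,
//       int offset = m_fp->m_func->unit()->offsetOf(pc);
//       Trace::trace("opNop offset: %d\n", offset));
//     iopNop(pc);
//     SYNC();
//     COND_STACKTRACE("opNop post: ");
//     condStackTraceSep("opNop ");
//   }
//
// i.e. one traced wrapper per opcode around the corresponding iop* handler.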
7073 static inline void
7074 profileReturnValue(const DataType dt) {
7075 const Func* f = curFunc();
7076 if (f->isPseudoMain() || f->isClosureBody() || f->isMagic() ||
7077 Func::isSpecial(f->name()))
7078 return;
7079 recordType(TypeProfileKey(TypeProfileKey::MethodName, f->name()), dt);
7082 template <int dispatchFlags>
7083 inline void VMExecutionContext::dispatchImpl(int numInstrs) {
7084 static const bool limInstrs = dispatchFlags & LimitInstrs;
7085 static const bool breakOnCtlFlow = dispatchFlags & BreakOnCtlFlow;
7086 static const bool profile = dispatchFlags & Profile;
7087 static const void *optabDirect[] = {
7088 #define O(name, imm, push, pop, flags) \
7089 &&Label##name,
7090 OPCODES
7091 #undef O
7093 static const void *optabDbg[] = {
7094 #define O(name, imm, push, pop, flags) \
7095 &&LabelDbg##name,
7096 OPCODES
7097 #undef O
7099 static const void *optabCover[] = {
7100 #define O(name, imm, push, pop, flags) \
7101 &&LabelCover##name,
7102 OPCODES
7103 #undef O
7105 assert(sizeof(optabDirect) / sizeof(const void *) == Op_count);
7106 assert(sizeof(optabDbg) / sizeof(const void *) == Op_count);
7107 const void **optab = optabDirect;
7108 bool collectCoverage = ThreadInfo::s_threadInfo->
7109 m_reqInjectionData.getCoverage();
7110 if (collectCoverage) {
7111 optab = optabCover;
7113 DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
7114 /*
7115 * Trace-only mapping of opcodes to names.
7116 */
7117 #ifdef HPHP_TRACE
7118 static const char *nametab[] = {
7119 #define O(name, imm, push, pop, flags) \
7120 #name,
7121 OPCODES
7122 #undef O
7124 #endif /* HPHP_TRACE */
7125 bool isCtlFlow = false;
7127 #define DISPATCH() do { \
7128 if ((breakOnCtlFlow && isCtlFlow) || \
7129 (limInstrs && UNLIKELY(numInstrs-- == 0))) { \
7130 ONTRACE(1, \
7131 Trace::trace("dispatch: Halt ExecutionContext::dispatch(%p)\n", \
7132 m_fp)); \
7133 return; \
7135 Op op = toOp(*pc); \
7136 COND_STACKTRACE("dispatch: "); \
7137 ONTRACE(1, \
7138 Trace::trace("dispatch: %d: %s\n", pcOff(), \
7139 nametab[uint8_t(op)])); \
7140 if (profile && (op == OpRetC || op == OpRetV)) { \
7141 profileReturnValue(m_stack.top()->m_type); \
7143 goto *optab[uint8_t(op)]; \
7144 } while (0)
7146 ONTRACE(1, Trace::trace("dispatch: Enter ExecutionContext::dispatch(%p)\n",
7147 m_fp));
7148 PC pc = m_pc;
7149 DISPATCH();
7151 #define O(name, imm, push, pop, flags) \
7152 LabelDbg##name: \
7153 phpDebuggerOpcodeHook(pc); \
7154 LabelCover##name: \
7155 if (collectCoverage) { \
7156 recordCodeCoverage(pc); \
7158 Label##name: { \
7159 iop##name(pc); \
7160 SYNC(); \
7161 if (breakOnCtlFlow) { \
7162 isCtlFlow = instrIsControlFlow(Op::name); \
7163 Stats::incOp(Op::name); \
7165 const Op op = Op::name; \
7166 if (op == OpRetC || op == OpRetV || op == OpNativeImpl) { \
7167 if (UNLIKELY(!pc)) { m_fp = 0; return; } \
7169 DISPATCH(); \
7171 OPCODES
7172 #undef O
7173 #undef DISPATCH
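// NB (editorial expansion, not in the original source): inside dispatchImpl,
// the O() macro above emits three entry labels per opcode; for Nop it is
// roughly
//
//   LabelDbgNop:                       // entered via optabDbg
//     phpDebuggerOpcodeHook(pc);
//   LabelCoverNop:                     // entered via optabCover
//     if (collectCoverage) { recordCodeCoverage(pc); }
//   LabelNop: {                        // entered via optabDirect
//     iopNop(pc);
//     SYNC();
//     if (breakOnCtlFlow) {
//       isCtlFlow = instrIsControlFlow(Op::Nop);
//       Stats::incOp(Op::Nop);
//     }
//     const Op op = Op::Nop;
//     if (op == OpRetC || op == OpRetV || op == OpNativeImpl) {
//       if (UNLIKELY(!pc)) { m_fp = 0; return; }
//     }
//     DISPATCH();   // computed goto: goto *optab[uint8_t(toOp(*pc))]
//   }
//
// so the debugger and coverage variants simply fall through into the plain
// handler after doing their extra work.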
7176 void VMExecutionContext::dispatch() {
7177 if (shouldProfile()) {
7178 dispatchImpl<Profile>(0);
7179 } else {
7180 dispatchImpl<0>(0);
7184 void VMExecutionContext::dispatchN(int numInstrs) {
7185 dispatchImpl<LimitInstrs | BreakOnCtlFlow>(numInstrs);
7186 // We are about to go back to the JIT; check whether we should
7187 // stick with the interpreter.
7188 if (DEBUGGER_FORCE_INTR) {
7189 throw VMSwitchMode();
7193 void VMExecutionContext::dispatchBB() {
7194 dispatchImpl<BreakOnCtlFlow>(0);
7195 // We are about to go back to the JIT; check whether we should
7196 // stick with the interpreter.
7197 if (DEBUGGER_FORCE_INTR) {
7198 throw VMSwitchMode();
7202 void VMExecutionContext::recordCodeCoverage(PC pc) {
7203 Unit* unit = getFP()->m_func->unit();
7204 assert(unit != nullptr);
7205 if (unit == SystemLib::s_nativeFuncUnit ||
7206 unit == SystemLib::s_nativeClassUnit ||
7207 unit == SystemLib::s_hhas_unit) {
7208 return;
7210 int line = unit->getLineNumber(pcOff());
7211 assert(line != -1);
7213 if (unit != m_coverPrevUnit || line != m_coverPrevLine) {
7214 ThreadInfo* info = ThreadInfo::s_threadInfo.getNoCheck();
7215 m_coverPrevUnit = unit;
7216 m_coverPrevLine = line;
7217 const StringData* filepath = unit->filepath();
7218 assert(filepath->isStatic());
7219 info->m_coverage->Record(filepath->data(), line, line);
7223 void VMExecutionContext::resetCoverageCounters() {
7224 m_coverPrevLine = -1;
7225 m_coverPrevUnit = nullptr;
7228 void VMExecutionContext::pushVMState(VMState &savedVM,
7229 const ActRec* reentryAR) {
7230 if (debug && savedVM.fp &&
7231 savedVM.fp->m_func &&
7232 savedVM.fp->m_func->unit()) {
7233 // Some asserts and tracing.
7234 const Func* func = savedVM.fp->m_func;
7235 (void) /* bound-check asserts in offsetOf */
7236 func->unit()->offsetOf(savedVM.pc);
7237 TRACE(3, "pushVMState: saving frame %s pc %p off %d fp %p\n",
7238 func->name()->data(),
7239 savedVM.pc,
7240 func->unit()->offsetOf(savedVM.pc),
7241 savedVM.fp);
7243 m_nestedVMs.push_back(ReentryRecord(savedVM, reentryAR));
7244 m_nesting++;
7247 void VMExecutionContext::popVMState() {
7248 assert(m_nestedVMs.size() >= 1);
7250 VMState &savedVM = m_nestedVMs.back().m_savedState;
7251 m_pc = savedVM.pc;
7252 m_fp = savedVM.fp;
7253 m_firstAR = savedVM.firstAR;
7254 assert(m_stack.top() == savedVM.sp);
7256 if (debug) {
7257 if (savedVM.fp &&
7258 savedVM.fp->m_func &&
7259 savedVM.fp->m_func->unit()) {
7260 const Func* func = savedVM.fp->m_func;
7261 (void) /* bound-check asserts in offsetOf */
7262 func->unit()->offsetOf(savedVM.pc);
7263 TRACE(3, "popVMState: restoring frame %s pc %p off %d fp %p\n",
7264 func->name()->data(),
7265 savedVM.pc,
7266 func->unit()->offsetOf(savedVM.pc),
7267 savedVM.fp);
7271 m_nestedVMs.pop_back();
7272 m_nesting--;
7275 void VMExecutionContext::requestInit() {
7276 assert(SystemLib::s_unit);
7277 assert(SystemLib::s_nativeFuncUnit);
7278 assert(SystemLib::s_nativeClassUnit);
7280 new (&s_requestArenaStorage) RequestArena();
7281 new (&s_varEnvArenaStorage) VarEnvArena();
7283 EnvConstants::requestInit(new (request_arena()) EnvConstants());
7284 VarEnv::createGlobal();
7285 m_stack.requestInit();
7286 Transl::Translator::advanceTranslator();
7287 tx()->requestInit();
7289 if (UNLIKELY(RuntimeOption::EvalJitEnableRenameFunction)) {
7290 SystemLib::s_unit->merge();
7291 if (SystemLib::s_hhas_unit) SystemLib::s_hhas_unit->merge();
7292 SystemLib::s_nativeFuncUnit->merge();
7293 SystemLib::s_nativeClassUnit->merge();
7294 } else {
7295 // System units are always merge only, and
7296 // everything is persistent.
7297 assert(SystemLib::s_unit->isEmpty());
7298 assert(!SystemLib::s_hhas_unit || SystemLib::s_hhas_unit->isEmpty());
7299 assert(SystemLib::s_nativeFuncUnit->isEmpty());
7300 assert(SystemLib::s_nativeClassUnit->isEmpty());
7303 profileRequestStart();
7305 #ifdef DEBUG
7306 Class* cls = Unit::GetNamedEntity(s_stdclass.get())->clsList();
7307 assert(cls);
7308 assert(cls == SystemLib::s_stdclassClass);
7309 #endif
7312 void VMExecutionContext::requestExit() {
7313 treadmillSharedVars();
7314 destructObjects();
7315 syncGdbState();
7316 tx()->requestExit();
7317 Transl::Translator::clearTranslator();
7318 m_stack.requestExit();
7319 profileRequestEnd();
7320 EventHook::Disable();
7321 EnvConstants::requestExit();
7323 if (m_globalVarEnv) {
7324 VarEnv::destroy(m_globalVarEnv);
7325 m_globalVarEnv = 0;
7328 varenv_arena().~VarEnvArena();
7329 request_arena().~RequestArena();
7332 ///////////////////////////////////////////////////////////////////////////////