Allow early initialization of classes if they don't have any [sp]init methods
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blob7a281abdf077705003a7bc7c6044952870760d3b
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/bytecode.h"
18 #include "hphp/compiler/builtin_symbols.h"
19 #include "hphp/runtime/vm/event_hook.h"
20 #include "hphp/runtime/vm/jit/translator-x64.h"
21 #include "hphp/runtime/vm/srckey.h"
22 #include "hphp/runtime/vm/member_operations.h"
23 #include "hphp/runtime/base/code_coverage.h"
24 #include "hphp/runtime/base/file_repository.h"
25 #include "hphp/runtime/base/base_includes.h"
26 #include "hphp/runtime/base/execution_context.h"
27 #include "hphp/runtime/base/runtime_option.h"
28 #include "hphp/runtime/base/array/hphp_array.h"
29 #include "hphp/runtime/base/strings.h"
30 #include "hphp/util/util.h"
31 #include "hphp/util/trace.h"
32 #include "hphp/util/debug.h"
33 #include "hphp/runtime/base/stat_cache.h"
34 #include "hphp/runtime/base/shared/shared_variant.h"
36 #include "hphp/runtime/vm/treadmill.h"
37 #include "hphp/runtime/vm/php_debug.h"
38 #include "hphp/runtime/vm/debugger_hook.h"
39 #include "hphp/runtime/vm/runtime.h"
40 #include "hphp/runtime/vm/jit/targetcache.h"
41 #include "hphp/runtime/vm/type_constraint.h"
42 #include "hphp/runtime/vm/jit/translator-inline.h"
43 #include "hphp/runtime/ext/ext_string.h"
44 #include "hphp/runtime/ext/ext_error.h"
45 #include "hphp/runtime/ext/ext_closure.h"
46 #include "hphp/runtime/ext/ext_continuation.h"
47 #include "hphp/runtime/ext/ext_function.h"
48 #include "hphp/runtime/ext/ext_variable.h"
49 #include "hphp/runtime/ext/ext_array.h"
50 #include "hphp/runtime/base/stats.h"
51 #include "hphp/runtime/vm/type_profile.h"
52 #include "hphp/runtime/base/server/source_root_info.h"
53 #include "hphp/runtime/base/util/extended_logger.h"
55 #include "hphp/system/lib/systemlib.h"
56 #include "hphp/runtime/ext/ext_collections.h"
58 #include "hphp/runtime/vm/name_value_table_wrapper.h"
59 #include "hphp/runtime/vm/request_arena.h"
60 #include "hphp/util/arena.h"
62 #include <iostream>
63 #include <iomanip>
64 #include <algorithm>
65 #include <boost/format.hpp>
66 #include <boost/utility/typed_in_place_factory.hpp>
68 #include <cinttypes>
70 #include <libgen.h>
71 #include <sys/mman.h>
73 namespace HPHP {
75 // TODO: #1746957, #1756122
76 // we should skip the call in call_user_func_array, if
77 // by reference params are passed by value, or if its
78 // argument is not an array, but currently lots of tests
79 // depend on actually making the call.
80 const bool skipCufOnInvalidParams = false;
82 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
83 // to be closer to other bytecode.cpp data.
84 bool RuntimeOption::RepoAuthoritative = false;
86 using std::string;
88 using Transl::tx64;
90 #if DEBUG
91 #define OPTBLD_INLINE
92 #else
93 #define OPTBLD_INLINE ALWAYS_INLINE
94 #endif
95 static const Trace::Module TRACEMOD = Trace::bcinterp;
namespace {

// Exception used internally to kick off unwinding of the VM stack when a
// fault must be propagated through the interpreter.
struct VMPrepareUnwind : std::exception {
  const char* what() const throw() { return "VMPrepareUnwind"; }
};

}
105 ActRec* ActRec::arGetSfp() const {
106 ActRec* prevFrame = (ActRec*)m_savedRbp;
107 if (LIKELY(((uintptr_t)prevFrame - Util::s_stackLimit) >=
108 Util::s_stackSize)) {
109 if (LIKELY(prevFrame != nullptr)) return prevFrame;
112 return const_cast<ActRec*>(this);
115 bool
116 ActRec::skipFrame() const {
117 return m_func && m_func->skipFrame();
120 template <>
121 Class* arGetContextClassImpl<false>(const ActRec* ar) {
122 if (ar == nullptr) {
123 return nullptr;
125 return ar->m_func->cls();
128 template <>
129 Class* arGetContextClassImpl<true>(const ActRec* ar) {
130 if (ar == nullptr) {
131 return nullptr;
133 if (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin()) {
134 // Pseudomains inherit the context of their caller
135 VMExecutionContext* context = g_vmContext;
136 ar = context->getPrevVMState(ar);
137 while (ar != nullptr &&
138 (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin())) {
139 ar = context->getPrevVMState(ar);
141 if (ar == nullptr) {
142 return nullptr;
145 return ar->m_func->cls();
148 const StaticString s_call_user_func("call_user_func");
149 const StaticString s_call_user_func_array("call_user_func_array");
150 const StaticString s_hphpd_break("hphpd_break");
151 const StaticString s_fb_enable_code_coverage("fb_enable_code_coverage");
152 const StaticString s_stdclass("stdclass");
153 const StaticString s___call("__call");
154 const StaticString s___callStatic("__callStatic");
155 const StaticString s_file("file");
156 const StaticString s_line("line");
157 const StaticString s_function("function");
158 const StaticString s_args("args");
159 const StaticString s_class("class");
160 const StaticString s_object("object");
161 const StaticString s_type("type");
162 const StaticString s_include("include");
164 ///////////////////////////////////////////////////////////////////////////////
166 //=============================================================================
167 // Miscellaneous macros.
// Bytecode decoding helpers.  `pc' is the interpreter's program counter;
// each DECODE_* reads an immediate operand at pc and (except DECODE_JMP)
// advances pc past it.
#define NEXT() pc++
#define DECODE_JMP(type, var)                                                 \
  type var __attribute__((unused)) = *(type*)pc;                              \
  ONTRACE(2,                                                                  \
          Trace::trace("decode:     Immediate %s %" PRIi64"\n", #type,        \
                       (int64_t)var));
#define ITER_SKIP(offset)  pc = origPc + (offset);

#define DECODE(type, var)                                                     \
  DECODE_JMP(type, var);                                                      \
  pc += sizeof(type)
#define DECODE_IVA(var)                                                       \
  int32_t var UNUSED = decodeVariableSizeImm(&pc);                            \
  ONTRACE(2,                                                                  \
          Trace::trace("decode:     Immediate int32 %" PRIi64"\n",            \
                       (int64_t)var));
#define DECODE_LITSTR(var)                                \
  StringData* var;                                        \
  do {                                                    \
    DECODE(Id, id);                                       \
    var = m_fp->m_func->unit()->lookupLitstrId(id);       \
  } while (false)

// Home/iterator ids are encoded as variable-size immediates.
#define DECODE_HA(var) DECODE_IVA(var)
#define DECODE_IA(var) DECODE_IVA(var)

// Publish the local pc back to the context's m_pc.
#define SYNC() m_pc = pc
197 //=============================================================================
198 // Miscellaneous helpers.
200 static inline Class* frameStaticClass(ActRec* fp) {
201 if (fp->hasThis()) {
202 return fp->getThis()->getVMClass();
203 } else if (fp->hasClass()) {
204 return fp->getClass();
205 } else {
206 return nullptr;
210 //=============================================================================
211 // VarEnv.
213 VarEnv::VarEnv()
214 : m_depth(0)
215 , m_malloced(false)
216 , m_cfp(0)
217 , m_previous(0)
218 , m_nvTable(boost::in_place<NameValueTable>(
219 RuntimeOption::EvalVMInitialGlobalTableSize))
221 TypedValue globalArray;
222 globalArray.m_type = KindOfArray;
223 globalArray.m_data.parr =
224 new (request_arena()) GlobalNameValueTableWrapper(&*m_nvTable);
225 globalArray.m_data.parr->incRefCount();
226 m_nvTable->set(StringData::GetStaticString("GLOBALS"), &globalArray);
227 tvRefcountedDecRef(&globalArray);
230 VarEnv::VarEnv(ActRec* fp, ExtraArgs* eArgs)
231 : m_extraArgs(eArgs)
232 , m_depth(1)
233 , m_malloced(false)
234 , m_cfp(fp)
236 const Func* func = fp->m_func;
237 const Id numNames = func->numNamedLocals();
239 if (!numNames) return;
241 m_nvTable = boost::in_place<NameValueTable>(numNames);
243 TypedValue** origLocs =
244 reinterpret_cast<TypedValue**>(uintptr_t(this) + sizeof(VarEnv));
245 TypedValue* loc = frame_local(fp, 0);
246 for (Id i = 0; i < numNames; ++i, --loc) {
247 assert(func->lookupVarId(func->localVarName(i)) == (int)i);
248 origLocs[i] = m_nvTable->migrateSet(func->localVarName(i), loc);
252 VarEnv::~VarEnv() {
253 TRACE(3, "Destroying VarEnv %p [%s]\n",
254 this,
255 isGlobalScope() ? "global scope" : "local scope");
256 assert(m_restoreLocations.empty());
257 if (g_vmContext->m_topVarEnv == this) {
258 g_vmContext->m_topVarEnv = m_previous;
261 if (!isGlobalScope()) {
262 if (LIKELY(!m_malloced)) {
263 varenv_arena().endFrame();
264 return;
266 } else {
268 * When detaching the global scope, we leak any live objects (and
269 * let the smart allocator clean them up). This is because we're
270 * not supposed to run destructors for objects that are live at
271 * the end of a request.
273 m_nvTable->leak();
277 VarEnv* VarEnv::createLazyAttach(ActRec* fp,
278 bool skipInsert /* = false */) {
279 const Func* func = fp->m_func;
280 const size_t numNames = func->numNamedLocals();
281 ExtraArgs* eArgs = fp->getExtraArgs();
282 const size_t neededSz = sizeof(VarEnv) +
283 sizeof(TypedValue*) * numNames;
285 TRACE(3, "Creating lazily attached VarEnv\n");
287 if (LIKELY(!skipInsert)) {
288 auto& va = varenv_arena();
289 va.beginFrame();
290 void* mem = va.alloc(neededSz);
291 VarEnv* ret = new (mem) VarEnv(fp, eArgs);
292 TRACE(3, "Creating lazily attached VarEnv %p\n", mem);
293 ret->setPrevious(g_vmContext->m_topVarEnv);
294 g_vmContext->m_topVarEnv = ret;
295 return ret;
299 * For skipInsert == true, we're adding a VarEnv in the middle of
300 * the chain, which means we can't use the stack allocation.
302 * The caller must immediately setPrevious, so don't bother setting
303 * it to an invalid pointer except in a debug build.
305 void* mem = malloc(neededSz);
306 VarEnv* ret = new (mem) VarEnv(fp, eArgs);
307 ret->m_malloced = true;
308 if (debug) {
309 ret->setPrevious((VarEnv*)-1);
311 return ret;
314 VarEnv* VarEnv::createGlobal() {
315 assert(!g_vmContext->m_globalVarEnv);
316 assert(!g_vmContext->m_topVarEnv);
318 VarEnv* ret = new (request_arena()) VarEnv();
319 TRACE(3, "Creating VarEnv %p [global scope]\n", ret);
320 g_vmContext->m_globalVarEnv = g_vmContext->m_topVarEnv = ret;
321 return ret;
324 void VarEnv::destroy(VarEnv* ve) {
325 bool malloced = ve->m_malloced;
326 ve->~VarEnv();
327 if (UNLIKELY(malloced)) free(ve);
330 void VarEnv::attach(ActRec* fp) {
331 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
332 this,
333 isGlobalScope() ? "global scope" : "local scope",
334 int(fp->m_func->numNamedLocals()), fp);
335 assert(m_depth == 0 || fp->arGetSfp() == m_cfp ||
336 (fp->arGetSfp() == fp && g_vmContext->isNested()));
337 m_cfp = fp;
338 m_depth++;
340 // Overlay fp's locals, if it has any.
342 const Func* func = fp->m_func;
343 const Id numNames = func->numNamedLocals();
344 if (!numNames) {
345 return;
347 if (!m_nvTable) {
348 m_nvTable = boost::in_place<NameValueTable>(numNames);
351 TypedValue** origLocs = new (varenv_arena()) TypedValue*[
352 func->numNamedLocals()];
353 TypedValue* loc = frame_local(fp, 0);
354 for (Id i = 0; i < numNames; ++i, --loc) {
355 assert(func->lookupVarId(func->localVarName(i)) == (int)i);
356 origLocs[i] = m_nvTable->migrate(func->localVarName(i), loc);
358 m_restoreLocations.push_back(origLocs);
361 void VarEnv::detach(ActRec* fp) {
362 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
363 this,
364 isGlobalScope() ? "global scope" : "local scope",
365 fp);
366 assert(fp == m_cfp);
367 assert(m_depth > 0);
369 // Merge/remove fp's overlaid locals, if it had any.
370 const Func* func = fp->m_func;
371 if (Id const numLocals = func->numNamedLocals()) {
373 * In the case of a lazily attached VarEnv, we have our locations
374 * for the first (lazy) attach stored immediately following the
375 * VarEnv in memory. In this case m_restoreLocations will be empty.
377 assert((!isGlobalScope() && m_depth == 1) == m_restoreLocations.empty());
378 TypedValue** origLocs =
379 !m_restoreLocations.empty()
380 ? m_restoreLocations.back()
381 : reinterpret_cast<TypedValue**>(uintptr_t(this) + sizeof(VarEnv));
383 for (Id i = 0; i < numLocals; i++) {
384 m_nvTable->resettle(func->localVarName(i), origLocs[i]);
386 if (!m_restoreLocations.empty()) {
387 m_restoreLocations.pop_back();
391 VMExecutionContext* context = g_vmContext;
392 m_cfp = context->getPrevVMState(fp);
393 m_depth--;
394 if (m_depth == 0) {
395 m_cfp = nullptr;
396 // don't free global varEnv
397 if (context->m_globalVarEnv != this) {
398 assert(!isGlobalScope());
399 destroy(this);
404 // This helper is creating a NVT because of dynamic variable accesses,
405 // even though we're already attached to a frame and it had no named
406 // locals.
407 void VarEnv::ensureNvt() {
408 const size_t kLazyNvtSize = 3;
409 if (!m_nvTable) {
410 m_nvTable = boost::in_place<NameValueTable>(kLazyNvtSize);
414 void VarEnv::set(const StringData* name, TypedValue* tv) {
415 ensureNvt();
416 m_nvTable->set(name, tv);
419 void VarEnv::bind(const StringData* name, TypedValue* tv) {
420 ensureNvt();
421 m_nvTable->bind(name, tv);
424 void VarEnv::setWithRef(const StringData* name, TypedValue* tv) {
425 if (tv->m_type == KindOfRef) {
426 bind(name, tv);
427 } else {
428 set(name, tv);
432 TypedValue* VarEnv::lookup(const StringData* name) {
433 if (!m_nvTable) {
434 return 0;
436 return m_nvTable->lookup(name);
439 TypedValue* VarEnv::lookupAdd(const StringData* name) {
440 ensureNvt();
441 return m_nvTable->lookupAdd(name);
444 TypedValue* VarEnv::lookupRawPointer(const StringData* name) {
445 ensureNvt();
446 return m_nvTable->lookupRawPointer(name);
449 TypedValue* VarEnv::lookupAddRawPointer(const StringData* name) {
450 ensureNvt();
451 return m_nvTable->lookupAddRawPointer(name);
454 bool VarEnv::unset(const StringData* name) {
455 if (!m_nvTable) return true;
456 m_nvTable->unset(name);
457 return true;
460 Array VarEnv::getDefinedVariables() const {
461 Array ret = Array::Create();
463 if (!m_nvTable) return ret;
465 NameValueTable::Iterator iter(&*m_nvTable);
466 for (; iter.valid(); iter.next()) {
467 const StringData* sd = iter.curKey();
468 const TypedValue* tv = iter.curVal();
469 if (tvAsCVarRef(tv).isReferenced()) {
470 ret.setRef(StrNR(sd).asString(), tvAsCVarRef(tv));
471 } else {
472 ret.add(StrNR(sd).asString(), tvAsCVarRef(tv));
476 return ret;
479 TypedValue* VarEnv::getExtraArg(unsigned argInd) const {
480 return m_extraArgs->getExtraArg(argInd);
483 //=============================================================================
485 ExtraArgs::ExtraArgs() {}
486 ExtraArgs::~ExtraArgs() {}
488 void* ExtraArgs::allocMem(unsigned nargs) {
489 return smart_malloc(sizeof(TypedValue) * nargs + sizeof(ExtraArgs));
492 ExtraArgs* ExtraArgs::allocateCopy(TypedValue* args, unsigned nargs) {
493 void* mem = allocMem(nargs);
494 ExtraArgs* ea = new (mem) ExtraArgs();
497 * The stack grows downward, so the args in memory are "backward"; i.e. the
498 * leftmost (in PHP) extra arg is highest in memory.
500 std::reverse_copy(args, args + nargs, &ea->m_extraArgs[0]);
501 return ea;
504 ExtraArgs* ExtraArgs::allocateUninit(unsigned nargs) {
505 void* mem = ExtraArgs::allocMem(nargs);
506 return new (mem) ExtraArgs();
509 void ExtraArgs::deallocate(ExtraArgs* ea, unsigned nargs) {
510 assert(nargs > 0);
512 for (unsigned i = 0; i < nargs; ++i) {
513 tvRefcountedDecRef(ea->m_extraArgs + i);
515 ea->~ExtraArgs();
516 smart_free(ea);
519 void ExtraArgs::deallocate(ActRec* ar) {
520 const int numExtra = ar->numArgs() - ar->m_func->numParams();
521 deallocate(ar->getExtraArgs(), numExtra);
524 TypedValue* ExtraArgs::getExtraArg(unsigned argInd) const {
525 return const_cast<TypedValue*>(&m_extraArgs[argInd]);
528 //=============================================================================
529 // Stack.
531 // Store actual stack elements array in a thread-local in order to amortize the
532 // cost of allocation.
533 class StackElms {
534 public:
535 StackElms() : m_elms(nullptr) {}
536 ~StackElms() {
537 flush();
539 TypedValue* elms() {
540 if (m_elms == nullptr) {
541 // RuntimeOption::EvalVMStackElms-sized and -aligned.
542 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
543 if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
544 throw std::runtime_error(
545 std::string("VM stack initialization failed: ") + strerror(errno));
548 return m_elms;
550 void flush() {
551 if (m_elms != nullptr) {
552 free(m_elms);
553 m_elms = nullptr;
556 private:
557 TypedValue* m_elms;
559 IMPLEMENT_THREAD_LOCAL(StackElms, t_se);
561 const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
562 // We reserve the bottom page of each stack for use as the surprise
563 // page, so the minimum useful stack size is the next power of two.
564 const uint Stack::sMinStackElms = 2 * sSurprisePageSize / sizeof(TypedValue);
566 void Stack::ValidateStackSize() {
567 if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
568 throw std::runtime_error(str(
569 boost::format("VM stack size of 0x%llx is below the minimum of 0x%x")
570 % RuntimeOption::EvalVMStackElms
571 % sMinStackElms));
573 if (!Util::isPowerOfTwo(RuntimeOption::EvalVMStackElms)) {
574 throw std::runtime_error(str(
575 boost::format("VM stack size of 0x%llx is not a power of 2")
576 % RuntimeOption::EvalVMStackElms));
580 Stack::Stack()
581 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
584 Stack::~Stack() {
585 requestExit();
588 void
589 Stack::protect() {
590 if (trustSigSegv) {
591 mprotect(m_elms, sizeof(void*), PROT_NONE);
595 void
596 Stack::unprotect() {
597 if (trustSigSegv) {
598 mprotect(m_elms, sizeof(void*), PROT_READ | PROT_WRITE);
602 void
603 Stack::requestInit() {
604 m_elms = t_se->elms();
605 if (trustSigSegv) {
606 RequestInjectionData& data = ThreadInfo::s_threadInfo->m_reqInjectionData;
607 Lock l(data.surpriseLock);
608 assert(data.surprisePage == nullptr);
609 data.surprisePage = m_elms;
611 // Burn one element of the stack, to satisfy the constraint that
612 // valid m_top values always have the same high-order (>
613 // log(RuntimeOption::EvalVMStackElms)) bits.
614 m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;
616 // Because of the surprise page at the bottom of the stack we lose an
617 // additional 256 elements which must be taken into account when checking for
618 // overflow.
619 UNUSED size_t maxelms =
620 RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
621 assert(!wouldOverflow(maxelms - 1));
622 assert(wouldOverflow(maxelms));
624 // Reset permissions on our stack's surprise page
625 unprotect();
628 void
629 Stack::requestExit() {
630 if (m_elms != nullptr) {
631 if (trustSigSegv) {
632 RequestInjectionData& data = ThreadInfo::s_threadInfo->m_reqInjectionData;
633 Lock l(data.surpriseLock);
634 assert(data.surprisePage == m_elms);
635 unprotect();
636 data.surprisePage = nullptr;
638 m_elms = nullptr;
642 void flush_evaluation_stack() {
643 if (g_context.isNull()) {
644 // For RPCRequestHandler threads, the ExecutionContext can stay alive
645 // across requests, and hold references to the VM stack, and
646 // the TargetCache needs to keep track of which classes are live etc
647 // So only flush the VM stack and the target cache if the execution
648 // context is dead.
650 if (!t_se.isNull()) {
651 t_se->flush();
653 TargetCache::flush();
657 void Stack::toStringElm(std::ostream& os, TypedValue* tv, const ActRec* fp)
658 const {
659 if (tv->m_type < MinDataType || tv->m_type > MaxNumDataTypes) {
660 os << " ??? type " << tv->m_type << "\n";
661 return;
663 assert(tv->m_type >= MinDataType && tv->m_type < MaxNumDataTypes);
664 if (IS_REFCOUNTED_TYPE(tv->m_type) && tv->m_data.pref->_count <= 0) {
665 // OK in the invoking frame when running a destructor.
666 os << " ??? inner_count " << tv->m_data.pref->_count << " ";
667 return;
669 switch (tv->m_type) {
670 case KindOfRef:
671 os << "V:(";
672 os << "@" << tv->m_data.pref;
673 tv = tv->m_data.pref->tv(); // Unbox so contents get printed below
674 assert(tv->m_type != KindOfRef);
675 toStringElm(os, tv, fp);
676 os << ")";
677 return;
678 case KindOfClass:
679 os << "A:";
680 break;
681 default:
682 os << "C:";
683 break;
685 switch (tv->m_type) {
686 case KindOfUninit: {
687 os << "Undefined";
688 break;
690 case KindOfNull: {
691 os << "Null";
692 break;
694 case KindOfBoolean: {
695 os << (tv->m_data.num ? "True" : "False");
696 break;
698 case KindOfInt64: {
699 os << "0x" << std::hex << tv->m_data.num << std::dec;
700 break;
702 case KindOfDouble: {
703 os << tv->m_data.dbl;
704 break;
706 case KindOfStaticString:
707 case KindOfString: {
708 int len = tv->m_data.pstr->size();
709 bool truncated = false;
710 if (len > 128) {
711 len = 128;
712 truncated = true;
714 os << tv->m_data.pstr
715 << "c(" << tv->m_data.pstr->getCount() << ")"
716 << ":\""
717 << Util::escapeStringForCPP(tv->m_data.pstr->data(), len)
718 << "\"" << (truncated ? "..." : "");
719 break;
721 case KindOfArray: {
722 assert(tv->m_data.parr->getCount() > 0);
723 os << tv->m_data.parr
724 << "c(" << tv->m_data.parr->getCount() << ")"
725 << ":Array";
726 break;
728 case KindOfObject: {
729 assert(tv->m_data.pobj->getCount() > 0);
730 os << tv->m_data.pobj
731 << "c(" << tv->m_data.pobj->getCount() << ")"
732 << ":Object("
733 << tvAsVariant(tv).asObjRef().get()->o_getClassName().get()->data()
734 << ")";
735 break;
737 case KindOfRef: {
738 not_reached();
740 case KindOfClass: {
741 os << tv->m_data.pcls
742 << ":" << tv->m_data.pcls->name()->data();
743 break;
745 default: {
746 os << "?";
747 break;
752 void Stack::toStringIter(std::ostream& os, Iter* it, bool itRef) const {
753 if (itRef) {
754 os << "I:MutableArray";
755 return;
757 switch (it->arr().getIterType()) {
758 case ArrayIter::TypeUndefined: {
759 os << "I:Undefined";
760 break;
762 case ArrayIter::TypeArray: {
763 os << "I:Array";
764 break;
766 case ArrayIter::TypeIterator: {
767 os << "I:Iterator";
768 break;
770 default: {
771 assert(false);
772 os << "I:?";
773 break;
778 void Stack::toStringFrag(std::ostream& os, const ActRec* fp,
779 const TypedValue* top) const {
780 TypedValue* tv;
782 // The only way to figure out which stack elements are activation records is
783 // to follow the frame chain. However, the goal for each stack frame is to
784 // print stack fragments from deepest to shallowest -- a then b in the
785 // following example:
787 // {func:foo,soff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
788 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
790 // Use depth-first recursion to get the output order correct.
792 if (LIKELY(!fp->m_func->isGenerator())) {
793 tv = frameStackBase(fp);
794 } else {
795 tv = generatorStackBase(fp);
798 for (tv--; (uintptr_t)tv >= (uintptr_t)top; tv--) {
799 os << " ";
800 toStringElm(os, tv, fp);
804 void Stack::toStringAR(std::ostream& os, const ActRec* fp,
805 const FPIEnt *fe, const TypedValue* top) const {
806 ActRec *ar;
807 if (LIKELY(!fp->m_func->isGenerator())) {
808 ar = arAtOffset(fp, -fe->m_fpOff);
809 } else {
810 // Deal with generators' split stacks. See unwindAR for reasoning.
811 TypedValue* genStackBase = generatorStackBase(fp);
812 ActRec* fakePrevFP =
813 (ActRec*)(genStackBase + fp->m_func->numSlotsInFrame());
814 ar = arAtOffset(fakePrevFP, -fe->m_fpOff);
817 if (fe->m_parentIndex != -1) {
818 toStringAR(os, fp, &fp->m_func->fpitab()[fe->m_parentIndex],
819 (TypedValue*)&ar[1]);
820 } else {
821 toStringFrag(os, fp, (TypedValue*)&ar[1]);
824 os << " {func:" << ar->m_func->fullName()->data() << "}";
825 TypedValue* tv = (TypedValue*)ar;
826 for (tv--; (uintptr_t)tv >= (uintptr_t)top; tv--) {
827 os << " ";
828 toStringElm(os, tv, fp);
832 void Stack::toStringFragAR(std::ostream& os, const ActRec* fp,
833 int offset, const TypedValue* top) const {
834 const FPIEnt *fe = fp->m_func->findFPI(offset);
835 if (fe != nullptr) {
836 toStringAR(os, fp, fe, top);
837 } else {
838 toStringFrag(os, fp, top);
842 void Stack::toStringFrame(std::ostream& os, const ActRec* fp,
843 int offset, const TypedValue* ftop,
844 const string& prefix) const {
845 assert(fp);
847 // Use depth-first recursion to output the most deeply nested stack frame
848 // first.
850 Offset prevPc = 0;
851 TypedValue* prevStackTop = nullptr;
852 ActRec* prevFp = g_vmContext->getPrevVMState(fp, &prevPc, &prevStackTop);
853 if (prevFp != nullptr) {
854 toStringFrame(os, prevFp, prevPc, prevStackTop, prefix);
858 os << prefix;
859 const Func* func = fp->m_func;
860 assert(func);
861 func->validate();
862 string funcName(func->fullName()->data());
863 os << "{func:" << funcName
864 << ",soff:" << fp->m_soff
865 << ",this:0x" << std::hex << (fp->hasThis() ? fp->getThis() : nullptr)
866 << std::dec << "}";
867 TypedValue* tv = (TypedValue*)fp;
868 tv--;
870 if (func->numLocals() > 0) {
871 os << "<";
872 int n = func->numLocals();
873 for (int i = 0; i < n; i++, tv--) {
874 if (i > 0) {
875 os << " ";
877 toStringElm(os, tv, fp);
879 os << ">";
882 assert(!func->info() || func->numIterators() == 0);
883 if (func->numIterators() > 0) {
884 os << "|";
885 Iter* it = &((Iter*)&tv[1])[-1];
886 for (int i = 0; i < func->numIterators(); i++, it--) {
887 if (i > 0) {
888 os << " ";
890 bool itRef;
891 if (func->checkIterScope(offset, i, itRef)) {
892 toStringIter(os, it, itRef);
893 } else {
894 os << "I:Undefined";
897 os << "|";
900 toStringFragAR(os, fp, offset, ftop);
902 os << std::endl;
905 string Stack::toString(const ActRec* fp, int offset,
906 const string prefix/* = "" */) const {
907 std::ostringstream os;
908 os << prefix << "=== Stack at " << curUnit()->filepath()->data() << ":" <<
909 curUnit()->getLineNumber(curUnit()->offsetOf(vmpc())) << " func " <<
910 curFunc()->fullName()->data() << " ===\n";
912 toStringFrame(os, fp, offset, m_top, prefix);
914 return os.str();
917 UnwindStatus Stack::unwindFrag(ActRec* fp, int offset,
918 PC& pc, Fault& fault) {
919 const Func* func = fp->m_func;
920 FTRACE(1, "unwindFrag: func {} ({})\n",
921 func->fullName()->data(), func->unit()->filepath()->data());
923 const bool unwindingGeneratorFrame = func->isGenerator();
924 auto const curOp = *reinterpret_cast<const Opcode*>(pc);
925 using namespace HPHP;
926 const bool unwindingReturningFrame = curOp == OpRetC || curOp == OpRetV;
927 TypedValue* evalTop;
928 if (UNLIKELY(unwindingGeneratorFrame)) {
929 assert(!isValidAddress((uintptr_t)fp));
930 evalTop = generatorStackBase(fp);
931 } else {
932 assert(isValidAddress((uintptr_t)fp));
933 evalTop = frameStackBase(fp);
935 assert(isValidAddress((uintptr_t)evalTop));
936 assert(evalTop >= m_top);
938 while (m_top < evalTop) {
939 popTV();
943 * This code is repeatedly called with the same offset when an
944 * exception is raised and rethrown by fault handlers. This
945 * `faultNest' iterator is here to skip the EHEnt handlers that have
946 * already been run for this in-flight exception.
948 if (const EHEnt* eh = func->findEH(offset)) {
949 int faultNest = 0;
950 for (;;) {
951 assert(faultNest <= fault.m_handledCount);
952 if (faultNest == fault.m_handledCount) {
953 ++fault.m_handledCount;
955 switch (eh->m_ehtype) {
956 case EHEnt::EHType_Fault:
957 FTRACE(1, "unwindFrag: entering fault at {}: save {}\n",
958 eh->m_fault,
959 func->unit()->offsetOf(pc));
960 fault.m_savedRaiseOffset = func->unit()->offsetOf(pc);
961 pc = (uchar*)(func->unit()->entry() + eh->m_fault);
962 return UnwindResumeVM;
963 case EHEnt::EHType_Catch:
964 // Note: we skip catch clauses if we have a pending C++ exception
965 // as part of our efforts to avoid running more PHP code in the
966 // face of such exceptions.
967 if ((fault.m_faultType == Fault::UserException) &&
968 (ThreadInfo::s_threadInfo->m_pendingException == nullptr)) {
969 ObjectData* obj = fault.m_userException;
970 for (auto& idOff : eh->m_catches) {
971 auto handler = func->unit()->at(idOff.second);
972 FTRACE(1, "unwindFrag: catch candidate {}\n", handler);
973 Class* cls = Unit::lookupClass(
974 func->unit()->lookupNamedEntityId(idOff.first)
976 if (cls && obj->instanceof(cls)) {
977 pc = handler;
978 FTRACE(1, "unwindFrag: entering catch at {}\n", pc);
979 return UnwindResumeVM;
983 break;
987 if (eh->m_parentIndex != -1) {
988 eh = &func->ehtab()[eh->m_parentIndex];
989 } else {
990 break;
992 ++faultNest;
996 // We found no more handlers in this frame, so the nested fault
997 // count starts over for the caller frame.
998 fault.m_handledCount = 0;
1000 if (fp->isFromFPushCtor() && fp->hasThis()) {
1001 fp->getThis()->setNoDestruct();
1004 // A generator's locals don't live on this stack.
1005 if (LIKELY(!unwindingGeneratorFrame)) {
1007 * If we're unwinding through a frame that's returning, it's only
1008 * possible that its locals have already been decref'd.
1010 * Here's why:
1012 * - If a destructor for any of these things throws a php
1013 * exception, it's swallowed at the dtor boundary and we keep
1014 * running php.
1016 * - If the destructor for any of these things throws a fatal,
1017 * it's swallowed, and we set surprise flags to throw a fatal
1018 * from now on.
1020 * - If the second case happened and we have to run another
1021 * destructor, its enter hook will throw, but it will be
1022 * swallowed again.
1024 * - Finally, the exit hook for the returning function can
1025 * throw, but this happens last so everything is destructed.
1028 if (!unwindingReturningFrame) {
1029 try {
1030 // Note that we must convert locals and the $this to
1031 // uninit/zero during unwind. This is because a backtrace
1032 // from another destructing object during this unwind may try
1033 // to read them.
1034 frame_free_locals_unwind(fp, func->numLocals());
1035 } catch (...) {}
1037 ndiscard(func->numSlotsInFrame());
1039 FTRACE(1, "unwindFrag: propagate\n");
1040 return UnwindPropagate;
1043 void Stack::unwindARFrag(ActRec* ar) {
1044 while (m_top < (TypedValue*)ar) {
1045 popTV();
1049 void Stack::unwindAR(ActRec* fp, const FPIEnt* fe) {
1050 while (true) {
1051 TRACE(1, "unwindAR: function %s, pIdx %d\n",
1052 fp->m_func->name()->data(), fe->m_parentIndex);
1053 ActRec* ar;
1054 if (LIKELY(!fp->m_func->isGenerator())) {
1055 ar = arAtOffset(fp, -fe->m_fpOff);
1056 } else {
1057 // fp is pointing into the continuation object. Since fpOff is given as an
1058 // offset from the frame pointer as if it were in the normal place on the
1059 // main stack, we have to reconstruct that "normal place".
1060 TypedValue* genStackBase = generatorStackBase(fp);
1061 ActRec* fakePrevFP =
1062 (ActRec*)(genStackBase + fp->m_func->numSlotsInFrame());
1063 ar = arAtOffset(fakePrevFP, -fe->m_fpOff);
1065 assert((TypedValue*)ar >= m_top);
1066 unwindARFrag(ar);
1068 if (ar->isFromFPushCtor()) {
1069 assert(ar->hasThis());
1070 ar->getThis()->setNoDestruct();
1073 popAR();
1074 if (fe->m_parentIndex != -1) {
1075 fe = &fp->m_func->fpitab()[fe->m_parentIndex];
1076 } else {
1077 return;
// Walk frames outward from fp looking for a handler for the given fault.
// Returns UnwindResumeVM if a handler was found in this VM nesting (fp/pc
// are updated to the resume point), or UnwindPropagate if the fault must
// propagate to a less-nested VM.
UnwindStatus Stack::unwindFrame(ActRec*& fp, int offset, PC& pc, Fault fault) {
  VMExecutionContext* context = g_vmContext;

  while (true) {
    SrcKey sk(fp->m_func, offset);
    SKTRACE(1, sk, "unwindFrame: func %s, offset %d fp %p\n",
            fp->m_func->name()->data(),
            offset, fp);

    // If the exception is already propagating, if it was in any FPI
    // region we already handled unwinding it the first time around.
    if (fault.m_handledCount == 0) {
      if (const FPIEnt *fe = fp->m_func->findFPI(offset)) {
        unwindAR(fp, fe);
      }
    }
    if (unwindFrag(fp, offset, pc, fault) == UnwindResumeVM) {
      // We've kept our own copy of the Fault, because m_faults may
      // change if we have a reentry during unwinding. When we're
      // ready to resume, we need to replace the current fault to
      // reflect any state changes we've made (handledCount, etc).
      assert(!context->m_faults.empty());
      context->m_faults.back() = fault;
      return UnwindResumeVM;
    }

    ActRec *prevFp = fp->arGetSfp();
    SKTRACE(1, sk, "unwindFrame: fp %p prevFp %p\n",
            fp, prevFp);
    if (LIKELY(!fp->m_func->isGenerator())) {
      // We don't need to refcount the AR's refcounted members; that was
      // taken care of in frame_free_locals, called from unwindFrag().
      // If it's a generator, the AR doesn't live on this stack.
      discardAR();
    }

    if (prevFp == fp) {
      // A frame that is its own saved-fp marks the base of this nesting.
      TRACE(1, "unwindFrame: reached the end of this nesting's ActRec "
            "chain\n");
      break;
    }

    // Keep the pc up to date while unwinding.
    Offset prevOff = fp->m_soff + prevFp->m_func->base();
    const Func *prevF = prevFp->m_func;
    assert(isValidAddress((uintptr_t)prevFp) || prevF->isGenerator());
    pc = prevF->unit()->at(prevOff);
    fp = prevFp;
    offset = prevOff;
  }

  return UnwindPropagate;
}
1136 bool Stack::wouldOverflow(int numCells) const {
1137 // The funny approach here is to validate the translator's assembly
1138 // technique. We've aligned and sized the stack so that the high order
1139 // bits of valid cells are all the same. In the translator, numCells
1140 // can be hardcoded, and m_top is wired into a register,
1141 // so the expression requires no loads.
1142 intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
1143 truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
1144 intptr_t diff = truncatedTop - numCells -
1145 sSurprisePageSize / sizeof(TypedValue);
1146 return diff < 0;
1149 TypedValue* Stack::frameStackBase(const ActRec* fp) {
1150 const Func* func = fp->m_func;
1151 assert(!func->isGenerator());
1152 return (TypedValue*)((uintptr_t)fp
1153 - (uintptr_t)(func->numLocals()) * sizeof(TypedValue)
1154 - (uintptr_t)(func->numIterators() * sizeof(Iter)));
// Return the evaluation-stack base for a generator frame. Generator
// ActRecs/locals live in the continuation object, not on the VM stack,
// so the base must be derived from the caller's state instead.
TypedValue* Stack::generatorStackBase(const ActRec* fp) {
  assert(fp->m_func->isGenerator());
  VMExecutionContext* context = g_vmContext;
  ActRec* sfp = fp->arGetSfp();
  if (sfp == fp) {
    // In the reentrant case, we can consult the savedVM state. We simply
    // use the top of stack of the previous VM frame (since the ActRec,
    // locals, and iters for this frame do not reside on the VM stack).
    return context->m_nestedVMs.back().m_savedState.sp;
  }
  // In the non-reentrant case, we know generators are always called from a
  // function with an empty stack. So we find the caller's FP, compensate
  // for its locals, and then we've found the base of the generator's stack.
  return (TypedValue*)sfp - sfp->m_func->numSlotsInFrame();
}
// Per-thread backing storage for the request-local arena and the VarEnv
// arena.
__thread RequestArenaStorage s_requestArenaStorage;
__thread VarEnvArenaStorage s_varEnvArenaStorage;

//=============================================================================
// ExecutionContext.

using namespace HPHP;
using namespace HPHP::MethodLookup;
// Return the frame enclosing ar, crossing a VM-nesting boundary if needed.
ActRec* VMExecutionContext::getOuterVMFrame(const ActRec* ar) {
  ActRec* prevFrame = (ActRec*)ar->m_savedRbp;
  // Single unsigned comparison checks that the saved rbp lies outside the
  // native C++ stack range [s_stackLimit, s_stackLimit + s_stackSize) --
  // i.e. that it is a genuine VM frame pointer rather than a native rbp.
  if (LIKELY(((uintptr_t)prevFrame - Util::s_stackLimit) >=
             Util::s_stackSize)) {
    if (LIKELY(prevFrame != nullptr)) return prevFrame;
  }

  // Otherwise fall back to the frame saved by the most recent VM reentry.
  if (LIKELY(!m_nestedVMs.empty())) return m_nestedVMs.back().m_savedState.fp;
  return nullptr;
}
1195 TypedValue* VMExecutionContext::lookupClsCns(const NamedEntity* ne,
1196 const StringData* cls,
1197 const StringData* cns) {
1198 Class* class_ = Unit::loadClass(ne, cls);
1199 if (class_ == nullptr) {
1200 raise_error(Strings::UNKNOWN_CLASS, cls->data());
1202 TypedValue* clsCns = class_->clsCnsGet(cns);
1203 if (clsCns == nullptr) {
1204 raise_error("Couldn't find constant %s::%s",
1205 cls->data(), cns->data());
1207 return clsCns;
1210 TypedValue* VMExecutionContext::lookupClsCns(const StringData* cls,
1211 const StringData* cns) {
1212 return lookupClsCns(Unit::GetNamedEntity(cls), cls, cns);
1215 // Look up the method specified by methodName from the class specified by cls
1216 // and enforce accessibility. Accessibility checks depend on the relationship
1217 // between the class that first declared the method (baseClass) and the context
1218 // class (ctx).
1220 // If there are multiple accessible methods with the specified name declared in
1221 // cls and ancestors of cls, the method from the most derived class will be
1222 // returned, except if we are doing an ObjMethod call ("$obj->foo()") and there
1223 // is an accessible private method, in which case the accessible private method
1224 // will be returned.
1226 // Accessibility rules:
1228 // | baseClass/ctx relationship | public | protected | private |
1229 // +----------------------------+--------+-----------+---------+
1230 // | anon/unrelated | yes | no | no |
1231 // | baseClass == ctx | yes | yes | yes |
1232 // | baseClass derived from ctx | yes | yes | no |
1233 // | ctx derived from baseClass | yes | yes | no |
1234 // +----------------------------+--------+-----------+---------+
// Look up methodName on cls, enforcing the accessibility rules described
// in the table above. Returns the resolved Func, or nullptr on failure
// (after raising a fatal error if 'raise' is true).
const Func* VMExecutionContext::lookupMethodCtx(const Class* cls,
                                                const StringData* methodName,
                                                Class* ctx,
                                                CallType callType,
                                                bool raise /* = false */) {
  const Func* method;
  if (callType == CtorMethod) {
    // Constructor lookup ignores methodName entirely.
    assert(methodName == nullptr);
    method = cls->getCtor();
  } else {
    assert(callType == ObjMethod || callType == ClsMethod);
    assert(methodName != nullptr);
    method = cls->lookupMethod(methodName);
    // Note: this "while" executes at most once; it exists so the
    // __construct fallback can 'break' out past the error path.
    while (!method) {
      static StringData* sd__construct
        = StringData::GetStaticString("__construct");
      if (UNLIKELY(methodName == sd__construct)) {
        // We were looking up __construct and failed to find it. Fall back
        // to old-style constructor: same as class name.
        method = cls->getCtor();
        if (!Func::isSpecial(method->name())) break;
      }
      if (raise) {
        raise_error("Call to undefined method %s::%s from %s%s",
                    cls->name()->data(),
                    methodName->data(),
                    ctx ? "context " : "anonymous context",
                    ctx ? ctx->name()->data() : "");
      }
      return nullptr;
    }
  }
  assert(method);
  bool accessible = true;
  // If we found a protected or private method, we need to do some
  // accessibility checks.
  if ((method->attrs() & (AttrProtected|AttrPrivate)) &&
      !g_vmContext->getDebuggerBypassCheck()) {
    Class* baseClass = method->baseCls();
    assert(baseClass);
    // If the context class is the same as the class that first
    // declared this method, then we know we have the right method
    // and we can stop here.
    if (ctx == baseClass) {
      return method;
    }
    // The anonymous context cannot access protected or private methods,
    // so we can fail fast here.
    if (ctx == nullptr) {
      if (raise) {
        raise_error("Call to %s method %s::%s from anonymous context",
                    (method->attrs() & AttrPrivate) ? "private" : "protected",
                    cls->name()->data(),
                    method->name()->data());
      }
      return nullptr;
    }
    assert(ctx);
    if (method->attrs() & AttrPrivate) {
      // The context class is not the same as the class that declared
      // this private method, so this private method is not accessible.
      // We need to keep going because the context class may define a
      // private method with this name.
      accessible = false;
    } else {
      // If the context class is derived from the class that first
      // declared this protected method, then we know this method is
      // accessible and we know the context class cannot have a private
      // method with the same name, so we're done.
      if (ctx->classof(baseClass)) {
        return method;
      }
      if (!baseClass->classof(ctx)) {
        // The context class is not the same, an ancestor, or a descendent
        // of the class that first declared this protected method, so
        // this method is not accessible. Because the context class is
        // not the same or an ancestor of the class which first declared
        // the method, we know that the context class is not the same
        // or an ancestor of cls, and therefore we don't need to check
        // if the context class declares a private method with this name,
        // so we can fail fast here.
        if (raise) {
          raise_error("Call to protected method %s::%s from context %s",
                      cls->name()->data(),
                      method->name()->data(),
                      ctx->name()->data());
        }
        return nullptr;
      }
      // We now know this protected method is accessible, but we need to
      // keep going because the context class may define a private method
      // with this name.
      assert(accessible && baseClass->classof(ctx));
    }
  }
  // If this is an ObjMethod call ("$obj->foo()") AND there is an ancestor
  // of cls that declares a private method with this name AND the context
  // class is an ancestor of cls, check if the context class declares a
  // private method with this name.
  if (method->hasPrivateAncestor() && callType == ObjMethod &&
      ctx && cls->classof(ctx)) {
    const Func* ctxMethod = ctx->lookupMethod(methodName);
    if (ctxMethod && ctxMethod->cls() == ctx &&
        (ctxMethod->attrs() & AttrPrivate)) {
      // For ObjMethod calls a private method from the context class
      // trumps any other method we may have found.
      return ctxMethod;
    }
  }
  if (accessible) {
    return method;
  }
  if (raise) {
    raise_error("Call to private method %s::%s from %s%s",
                method->baseCls()->name()->data(),
                method->name()->data(),
                ctx ? "context " : "anonymous context",
                ctx ? ctx->name()->data() : "");
  }
  return nullptr;
}
// Resolve methodName for an instance call on cls from the current VM
// context, falling back to __call. Sets f to the resolved function (or
// nullptr) and returns how it should be invoked.
LookupResult VMExecutionContext::lookupObjMethod(const Func*& f,
                                                 const Class* cls,
                                                 const StringData* methodName,
                                                 bool raise /* = false */) {
  Class* ctx = arGetContextClass(getFP());
  f = lookupMethodCtx(cls, methodName, ctx, ObjMethod, false);
  if (!f) {
    f = cls->lookupMethod(s___call.get());
    if (!f) {
      if (raise) {
        // Throw a fatal error: re-run the lookup with raise=true so it
        // produces the proper error message.
        lookupMethodCtx(cls, methodName, ctx, ObjMethod, true);
      }
      return MethodNotFound;
    }
    return MagicCallFound;
  }
  if (f->attrs() & AttrStatic && !f->isClosureBody()) {
    return MethodFoundNoThis;
  }
  return MethodFoundWithThis;
}
// Resolve methodName for a static-style call on cls from the current VM
// context, falling back to __call (when an instance of cls is available)
// and then __callStatic. Sets f and returns how it should be invoked.
LookupResult
VMExecutionContext::lookupClsMethod(const Func*& f,
                                    const Class* cls,
                                    const StringData* methodName,
                                    ObjectData* obj,
                                    bool raise /* = false */) {
  Class* ctx = arGetContextClass(getFP());
  f = lookupMethodCtx(cls, methodName, ctx, ClsMethod, false);
  if (!f) {
    if (obj && obj->instanceof(cls)) {
      // An instance is in scope: try __call on its (possibly more
      // derived) class first.
      f = obj->getVMClass()->lookupMethod(s___call.get());
    }
    if (!f) {
      f = cls->lookupMethod(s___callStatic.get());
      if (!f) {
        if (raise) {
          // Throw a fatal error
          lookupMethodCtx(cls, methodName, ctx, ClsMethod, true);
        }
        return MethodNotFound;
      }
      f->validate();
      assert(f);
      assert(f->attrs() & AttrStatic);
      return MagicCallStaticFound;
    }
    assert(f);
    assert(obj);
    // __call cannot be static, this should be enforced by semantic
    // checks defClass time or earlier
    assert(!(f->attrs() & AttrStatic));
    return MagicCallFound;
  }
  if (obj && !(f->attrs() & AttrStatic) && obj->instanceof(cls)) {
    return MethodFoundWithThis;
  }
  return MethodFoundNoThis;
}
// Resolve cls's constructor, applying accessibility checks only when the
// ctor is non-public. Sets f and returns the lookup outcome.
LookupResult VMExecutionContext::lookupCtorMethod(const Func*& f,
                                                  const Class* cls,
                                                  bool raise /* = false */) {
  f = cls->getCtor();
  if (!(f->attrs() & AttrPublic)) {
    Class* ctx = arGetContextClass(getFP());
    f = lookupMethodCtx(cls, nullptr, ctx, CtorMethod, raise);
    if (!f) {
      // If raise was true then lookupMethodCtx should have thrown,
      // so we should only be able to get here if raise was false
      assert(!raise);
      return MethodNotFound;
    }
  }
  return MethodFoundWithThis;
}
// Instantiate clsName, optionally running its constructor with params.
// Raises a fatal if the class cannot be loaded. NOTE(review): the
// detach()/decRefCount() pair at the end hands back the pointer without
// the reference the Object wrapper held — presumably the caller relies on
// another live reference (e.g. one taken by the constructor machinery);
// confirm against callers before touching this.
ObjectData* VMExecutionContext::createObject(StringData* clsName,
                                             CArrRef params,
                                             bool init /* = true */) {
  Class* class_ = Unit::loadClass(clsName);
  if (class_ == nullptr) {
    throw_missing_class(clsName->data());
  }
  Object o;
  o = newInstance(class_);
  if (init) {
    // call constructor
    TypedValue ret;
    invokeFunc(&ret, class_->getCtor(), params, o.get());
    tvRefcountedDecRef(&ret);  // constructor's return value is discarded
  }

  ObjectData* ret = o.detach();
  ret->decRefCount();
  return ret;
}
1458 ObjectData* VMExecutionContext::createObjectOnly(StringData* clsName) {
1459 return createObject(clsName, null_array, false);
1462 ActRec* VMExecutionContext::getStackFrame() {
1463 VMRegAnchor _;
1464 return getFP();
1467 ObjectData* VMExecutionContext::getThis() {
1468 VMRegAnchor _;
1469 ActRec* fp = getFP();
1470 if (fp->skipFrame()) {
1471 fp = getPrevVMState(fp);
1472 if (!fp) return nullptr;
1474 if (fp->hasThis()) {
1475 return fp->getThis();
1477 return nullptr;
1480 Class* VMExecutionContext::getContextClass() {
1481 VMRegAnchor _;
1482 ActRec* ar = getFP();
1483 assert(ar != nullptr);
1484 if (ar->skipFrame()) {
1485 ar = getPrevVMState(ar);
1486 if (!ar) return nullptr;
1488 return ar->m_func->cls();
1491 Class* VMExecutionContext::getParentContextClass() {
1492 if (Class* ctx = getContextClass()) {
1493 return ctx->parent();
1495 return nullptr;
1498 CStrRef VMExecutionContext::getContainingFileName() {
1499 VMRegAnchor _;
1500 ActRec* ar = getFP();
1501 if (ar == nullptr) return empty_string;
1502 if (ar->skipFrame()) {
1503 ar = getPrevVMState(ar);
1504 if (ar == nullptr) return empty_string;
1506 Unit* unit = ar->m_func->unit();
1507 return unit->filepathRef();
// Return the source line currently executing (in the first non-skipped
// frame), or -1 when it cannot be determined.
int VMExecutionContext::getLine() {
  VMRegAnchor _;
  ActRec* ar = getFP();
  Unit* unit = ar ? ar->m_func->unit() : nullptr;
  // pcOff() is only meaningful when we actually have a unit.
  Offset pc = unit ? pcOff() : 0;
  if (ar == nullptr) return -1;
  if (ar->skipFrame()) {
    // Skip builtin frames; getPrevVMState also rewrites pc for the caller.
    ar = getPrevVMState(ar, &pc);
  }
  if (ar == nullptr || (unit = ar->m_func->unit()) == nullptr) return -1;
  return unit->getLineNumber(pc);
}
// Return an array with 'file' and 'line' keys describing the nearest
// caller that is not call_user_func(_array); empty array on failure.
Array VMExecutionContext::getCallerInfo() {
  VMRegAnchor _;
  Array result = Array::Create();
  ActRec* ar = getFP();
  if (ar->skipFrame()) {
    // NOTE(review): ar is dereferenced below without a null check after
    // this call, unlike the loop body — confirm getPrevVMState cannot
    // return nullptr here.
    ar = getPrevVMState(ar);
  }
  // Skip over call_user_func / call_user_func_array wrapper frames.
  while (ar->m_func->name()->isame(s_call_user_func.get())
         || ar->m_func->name()->isame(s_call_user_func_array.get())) {
    ar = getPrevVMState(ar);
    if (ar == nullptr) {
      return result;
    }
  }

  Offset pc = 0;
  ar = getPrevVMState(ar, &pc);
  while (ar != nullptr) {
    if (!ar->m_func->name()->isame(s_call_user_func.get())
        && !ar->m_func->name()->isame(s_call_user_func_array.get())) {
      Unit* unit = ar->m_func->unit();
      int lineNumber;
      if ((lineNumber = unit->getLineNumber(pc)) != -1) {
        // File paths recorded in units are expected to be absolute.
        assert(!unit->filepath()->size() ||
               unit->filepath()->data()[0] == '/');
        result.set(s_file, unit->filepath()->data(), true);
        result.set(s_line, lineNumber);
        return result;
      }
    }
    ar = getPrevVMState(ar, &pc);
  }
  return result;
}
1558 bool VMExecutionContext::renameFunction(const StringData* oldName,
1559 const StringData* newName) {
1560 return m_renamedFuncs.rename(oldName, newName);
1563 bool VMExecutionContext::isFunctionRenameable(const StringData* name) {
1564 return m_renamedFuncs.isFunctionRenameable(name);
1567 void VMExecutionContext::addRenameableFunctions(ArrayData* arr) {
1568 m_renamedFuncs.addRenameableFunctions(arr);
1571 VarEnv* VMExecutionContext::getVarEnv() {
1572 Transl::VMRegAnchor _;
1574 VarEnv* builtinVarEnv = nullptr;
1575 ActRec* fp = getFP();
1576 if (UNLIKELY(!fp)) return NULL;
1577 if (fp->skipFrame()) {
1578 if (fp->hasVarEnv()) {
1579 builtinVarEnv = fp->getVarEnv();
1581 fp = getPrevVMState(fp);
1583 if (!fp) return nullptr;
1584 assert(!fp->hasInvName());
1585 if (!fp->hasVarEnv()) {
1586 if (builtinVarEnv) {
1587 // If the builtin function has its own VarEnv, we temporarily
1588 // remove it from the list before making a VarEnv for the calling
1589 // function to satisfy various asserts
1590 assert(builtinVarEnv == m_topVarEnv);
1591 m_topVarEnv = m_topVarEnv->previous();
1593 fp->m_varEnv = VarEnv::createLazyAttach(fp);
1594 if (builtinVarEnv) {
1595 // Put the builtin function's VarEnv back in the list
1596 builtinVarEnv->setPrevious(fp->m_varEnv);
1597 m_topVarEnv = builtinVarEnv;
1600 return fp->m_varEnv;
// Bind (ref=true) or set (ref=false) variable 'name' in the current
// non-skipped frame's VarEnv.
void VMExecutionContext::setVar(StringData* name, TypedValue* v, bool ref) {
  Transl::VMRegAnchor _;
  // setVar() should only be called after getVarEnv() has been called
  // to create a varEnv
  ActRec *fp = getFP();
  if (!fp) return;
  if (fp->skipFrame()) {
    fp = getPrevVMState(fp);
  }
  assert(!fp->hasInvName());
  assert(!fp->hasExtraArgs());
  assert(fp->m_varEnv != nullptr);
  if (ref) {
    fp->m_varEnv->bind(name, v);
  } else {
    fp->m_varEnv->set(name, v);
  }
}
// Return the defined variables of the frame 'frame' levels up the call
// chain (0 = current). Uses the VarEnv when present, otherwise collects
// the frame's initialized named locals directly.
Array VMExecutionContext::getLocalDefinedVariables(int frame) {
  Transl::VMRegAnchor _;
  ActRec *fp = getFP();
  // Walk up 'frame' levels; stop early if the chain runs out.
  for (; frame > 0; --frame) {
    if (!fp) break;
    fp = getPrevVMState(fp);
  }
  if (!fp) {
    return Array::Create();
  }
  assert(!fp->hasInvName());
  if (fp->hasVarEnv()) {
    return fp->m_varEnv->getDefinedVariables();
  }
  Array ret = Array::Create();
  const Func *func = fp->m_func;
  for (Id id = 0; id < func->numNamedLocals(); ++id) {
    TypedValue* ptv = frame_local(fp, id);
    if (ptv->m_type == KindOfUninit) {
      // Uninitialized locals are not "defined".
      continue;
    }
    Variant name(func->localVarName(id)->data());
    ret.add(name, tvAsVariant(ptv));
  }
  return ret;
}
// Rewrite the stack for a magic (__call / __callStatic) invocation:
// replace the N pushed arguments with exactly two — the invoked method
// name and an array packing the original arguments.
void VMExecutionContext::shuffleMagicArgs(ActRec* ar) {
  // We need to put this where the first argument is
  StringData* invName = ar->getInvName();
  int nargs = ar->numArgs();
  // Clear the invName slot (it shares storage with the VarEnv pointer).
  ar->setVarEnv(nullptr);
  assert(!ar->hasVarEnv() && !ar->hasInvName());
  // We need to make an array containing all the arguments passed by the
  // caller and put it where the second argument is
  ArrayData* argArray = pack_args_into_array(ar, nargs);
  argArray->incRefCount();
  // Remove the arguments from the stack
  for (int i = 0; i < nargs; ++i) {
    m_stack.popC();
  }
  // Move invName to where the first argument belongs, no need
  // to incRef/decRef since we are transferring ownership
  m_stack.pushStringNoRc(invName);
  // Move argArray to where the second argument belongs. We've already
  // incReffed the array above so we don't need to do it here.
  m_stack.pushArrayNoRc(argArray);

  ar->setNumArgs(2);
}
1673 static inline void checkStack(Stack& stk, const Func* f) {
1674 ThreadInfo* info = ThreadInfo::s_threadInfo.getNoCheck();
1675 // Check whether func's maximum stack usage would overflow the stack.
1676 // Both native and VM stack overflows are independently possible.
1677 if (!stack_in_bounds(info) ||
1678 stk.wouldOverflow(f->maxStackCells() + kStackCheckPadding)) {
1679 TRACE(1, "Maximum VM stack depth exceeded.\n");
1680 raise_error("Stack overflow");
// Set up the stack, locals, m_fp, and pc for entering ar's function:
// handles magic-call shuffling, inherited VarEnvs, extra args, missing
// args (with default-value funclets), and closure use-vars. Returns true
// when the interpreter should proceed at pc.
bool VMExecutionContext::prepareFuncEntry(ActRec *ar, PC& pc) {
  const Func* func = ar->m_func;
  Offset firstDVInitializer = InvalidAbsoluteOffset;
  bool raiseMissingArgumentWarnings = false;
  int nparams = func->numParams();
  if (UNLIKELY(ar->m_varEnv != nullptr)) {
    /*
     * m_varEnv != nullptr => we have a varEnv, extraArgs, or an invName.
     */
    if (ar->hasInvName()) {
      // shuffleMagicArgs deals with everything. no need for
      // further argument munging
      shuffleMagicArgs(ar);
    } else if (ar->hasVarEnv()) {
      m_fp = ar;
      if (!func->isGenerator()) {
        assert(func->isPseudoMain());
        pushLocalsAndIterators(func);
        ar->m_varEnv->attach(ar);
      }
      pc = func->getEntry();
      // Nothing more to do; get out
      return true;
    } else {
      assert(ar->hasExtraArgs());
      assert(func->numParams() < ar->numArgs());
    }
  } else {
    int nargs = ar->numArgs();
    if (nargs != nparams) {
      if (nargs < nparams) {
        // Push uninitialized nulls for missing arguments. Some of them may end
        // up getting default-initialized, but regardless, we need to make space
        // for them on the stack.
        const Func::ParamInfoVec& paramInfo = func->params();
        for (int i = nargs; i < nparams; ++i) {
          m_stack.pushUninit();
          Offset dvInitializer = paramInfo[i].funcletOff();
          if (dvInitializer == InvalidAbsoluteOffset) {
            // We wait to raise warnings until after all the locals have been
            // initialized. This is important because things need to be in a
            // consistent state in case the user error handler throws.
            raiseMissingArgumentWarnings = true;
          } else if (firstDVInitializer == InvalidAbsoluteOffset) {
            // This is the first unpassed arg with a default value, so
            // this is where we'll need to jump to.
            firstDVInitializer = dvInitializer;
          }
        }
      } else {
        if (func->attrs() & AttrMayUseVV) {
          // Extra parameters must be moved off the stack.
          const int numExtras = nargs - nparams;
          ar->setExtraArgs(ExtraArgs::allocateCopy((TypedValue*)ar - nargs,
                                                   numExtras));
          m_stack.ndiscard(numExtras);
        } else {
          // The function we're calling is not marked as "MayUseVV",
          // so just discard the extra arguments
          int numExtras = nargs - nparams;
          for (int i = 0; i < numExtras; i++) {
            m_stack.popTV();
          }
          ar->setNumArgs(nparams);
        }
      }
    }
  }

  int nlocals = nparams;
  if (UNLIKELY(func->isClosureBody())) {
    // Closures also carry their captured use-vars as extra locals.
    int nuse = init_closure(ar, m_stack.top());
    // init_closure doesn't move m_stack
    m_stack.nalloc(nuse);
    nlocals += nuse;
    func = ar->m_func;
  }

  if (LIKELY(!func->isGenerator())) {
    /*
     * we only get here from callAndResume
     * if we failed to get a translation for
     * a generator's prologue
     */
    pushLocalsAndIterators(func, nlocals);
  }

  m_fp = ar;
  if (firstDVInitializer != InvalidAbsoluteOffset) {
    // Jump to the first default-value funclet rather than the entry.
    pc = func->unit()->entry() + firstDVInitializer;
  } else {
    pc = func->getEntry();
  }
  // cppext functions/methods have their own logic for raising
  // warnings for missing arguments, so we only need to do this work
  // for non-cppext functions/methods
  if (raiseMissingArgumentWarnings && !func->info()) {
    // need to sync m_pc to pc for backtraces/re-entry
    SYNC();
    const Func::ParamInfoVec& paramInfo = func->params();
    for (int i = ar->numArgs(); i < nparams; ++i) {
      Offset dvInitializer = paramInfo[i].funcletOff();
      if (dvInitializer == InvalidAbsoluteOffset) {
        const char* name = func->name()->data();
        if (nparams == 1) {
          raise_warning(Strings::MISSING_ARGUMENT, name, i);
        } else {
          raise_warning(Strings::MISSING_ARGUMENTS, name, nparams, i);
        }
      }
    }
  }
  return true;
}
1799 void VMExecutionContext::syncGdbState() {
1800 if (RuntimeOption::EvalJit && !RuntimeOption::EvalJitNoGdb) {
1801 tx64->m_debugInfo.debugSync();
// Enter the VM through a function prologue: via the JIT's translated
// prologue when the JIT is enabled, otherwise via the interpreter.
void VMExecutionContext::enterVMPrologue(ActRec* enterFnAr) {
  assert(enterFnAr);
  Stats::inc(Stats::VMEnter);
  if (ThreadInfo::s_threadInfo->m_reqInjectionData.getJit()) {
    int np = enterFnAr->m_func->numParams();
    int na = enterFnAr->numArgs();
    // Prologue selection: one prologue per arg count up to numParams,
    // plus one shared prologue for the too-many-args case.
    if (na > np) na = np + 1;
    TCA start = enterFnAr->m_func->getPrologue(na);
    tx64->enterTCAtProlog(enterFnAr, start);
  } else {
    if (prepareFuncEntry(enterFnAr, m_pc)) {
      enterVMWork(enterFnAr);
    }
  }
}
// Run VM work, either for a fresh function entry (enterFnAr non-null)
// or resuming at the current m_pc (enterFnAr null). Dispatches to the
// JIT when enabled, otherwise to the bytecode interpreter.
void VMExecutionContext::enterVMWork(ActRec* enterFnAr) {
  TCA start = nullptr;
  if (enterFnAr) {
    // The function-enter hook may intercept the call entirely.
    if (!EventHook::FunctionEnter(enterFnAr, EventHook::NormalFunc)) return;
    checkStack(m_stack, enterFnAr->m_func);
    start = enterFnAr->m_func->getFuncBody();
  }
  Stats::inc(Stats::VMEnter);
  if (ThreadInfo::s_threadInfo->m_reqInjectionData.getJit()) {
    (void) curUnit()->offsetOf(m_pc); /* assert */
    if (enterFnAr) {
      assert(start);
      tx64->enterTCAfterProlog(start);
    } else {
      // Resume: enter translated code at the current source key.
      SrcKey sk(curFunc(), m_pc);
      tx64->enterTCAtSrcKey(sk);
    }
  } else {
    dispatch();
  }
}
// Enumeration codes for the handling of VM exceptions.
enum {
  EXCEPTION_START = 0,  // initial entry into the VM loop
  EXCEPTION_PROPAGATE,  // rethrow the fault to a less-nested VM
  EXCEPTION_RESUMEVM,   // resume the VM loop (e.g. at a user handler)
  EXCEPTION_DEBUGGER    // re-enter after a switchMode() VM-mode switch
};
// Record a new Fault on the execution context's fault stack: either a
// user-level exception object (t == UserException, o non-null) or a C++
// exception (e, which the Fault takes ownership of).
static void pushFault(Fault::Type t, Exception* e, const Object* o = nullptr) {
  FTRACE(1, "pushing new fault: {} {} {}\n",
         t == Fault::UserException ? "[user exception]" : "[cpp exception]",
         e, o);

  VMExecutionContext* ec = g_vmContext;
  Fault fault;
  fault.m_faultType = t;
  if (t == Fault::UserException) {
    // User object.
    assert(o);
    // The fault holds its own reference to the user exception object.
    fault.m_userException = o->get();
    fault.m_userException->incRefCount();
  } else {
    fault.m_cppException = e;
  }
  ec->m_faults.push_back(fault);
}
// Must be called from inside a catch block: rethrows the in-flight
// exception to classify it, records it as a Fault, and returns the
// EXCEPTION_* jump code that enterVM should act on next.
static int exception_handler() {
  int longJmpType;
  try {
    throw;  // re-raise the current exception so we can dispatch on type
  } catch (const Object& e) {
    pushFault(Fault::UserException, nullptr, &e);
    longJmpType = g_vmContext->hhvmPrepareThrow();
  } catch (VMSwitchModeException &e) {
    // Not a fault at all: the debugger asked for a VM mode switch.
    longJmpType = g_vmContext->switchMode(e.unwindBuiltin());
  } catch (Exception &e) {
    pushFault(Fault::CppException, e.clone());
    longJmpType = g_vmContext->hhvmPrepareThrow();
  } catch (std::exception& e) {
    pushFault(Fault::CppException,
              new Exception("unexpected %s: %s", typeid(e).name(), e.what()));
    longJmpType = g_vmContext->hhvmPrepareThrow();
  } catch (...) {
    pushFault(Fault::CppException,
              new Exception("unknown exception"));
    longJmpType = g_vmContext->hhvmPrepareThrow();
  }
  return longJmpType;
}
// Top-level VM entry for one nesting: runs ar's function to completion,
// storing its return value in *retval. Centralizes unwinding: faults are
// caught here, unwound within this nesting, and either resumed at a user
// handler or propagated (rethrown) to the enclosing nesting.
void VMExecutionContext::enterVM(TypedValue* retval, ActRec* ar) {
  m_firstAR = ar;
  // Returning from ar lands in the TC's call-to-exit helper, which pops
  // us out of the JIT back to this function.
  ar->m_savedRip = (uintptr_t)tx64->getCallToExit();
  assert(isReturnHelper(ar->m_savedRip));

  // Every fault pushed during this nesting must be consumed before exit.
  DEBUG_ONLY int faultDepth = m_faults.size();
  SCOPE_EXIT {
    if (debug) assert(m_faults.size() == faultDepth);
  };

  /*
   * TODO(#1343044): some of the structure of this code dates back to
   * when it used to be setjmp/longjmp based. It is probable we could
   * simplify it a little more, and maybe combine some of the logic
   * with exception_handler().
   *
   * When an exception is propagating, each nesting of the VM is
   * responsible for unwinding its portion of the execution stack, and
   * finding user handlers if it is a catchable exception.
   *
   * This try/catch is where all this logic is centered. The actual
   * unwinding happens under hhvmPrepareThrow, which returns a new
   * "jumpCode" here to indicate what to do next. Either we'll enter
   * the VM loop again at a user error/fault handler, or propagate the
   * exception to a less-nested VM.
   */
  int jumpCode = EXCEPTION_START;
short_jump:
  try {
    switch (jumpCode) {
    case EXCEPTION_START:
      if (m_fp && !ar->m_varEnv) {
        enterVMPrologue(ar);
      } else {
        if (prepareFuncEntry(ar, m_pc)) {
          enterVMWork(ar);
        }
      }
      break;
    case EXCEPTION_PROPAGATE:
      // Jump out of this try/catch before throwing.
      goto propagate;
    case EXCEPTION_DEBUGGER:
      // Triggered by switchMode() to switch VM mode
      // do nothing but reenter the VM with same VM stack
      /* Fallthrough */
    case EXCEPTION_RESUMEVM:
      enterVMWork(0);
      break;
    default:
      NOT_REACHED();
    }
  } catch (const VMPrepareUnwind&) {
    // This is slightly different from VMPrepareThrow, because we need
    // to re-raise the exception as if it came from the same offset.
    Fault fault = m_faults.back();
    Offset faultPC = fault.m_savedRaiseOffset;
    FTRACE(1, "unwind: restoring offset {}\n", faultPC);
    assert(faultPC != kInvalidOffset);
    fault.m_savedRaiseOffset = kInvalidOffset;
    UnwindStatus unwindType = m_stack.unwindFrame(m_fp, faultPC, m_pc, fault);
    jumpCode = handleUnwind(unwindType);
    goto short_jump;
  } catch (...) {
    assert(tl_regState == REGSTATE_CLEAN);
    jumpCode = exception_handler();
    assert(jumpCode != EXCEPTION_START);
    goto short_jump;
  }

  // Normal completion: the return value is on top of the stack.
  *retval = *m_stack.topTV();
  m_stack.discard();
  return;

propagate:
  // This nesting is fully unwound; rethrow the fault to the caller.
  assert(m_faults.size() > 0);
  Fault fault = m_faults.back();
  m_faults.pop_back();
  switch (fault.m_faultType) {
  case Fault::UserException: {
    // Transfer the fault's reference into the thrown Object.
    Object obj = fault.m_userException;
    fault.m_userException->decRefCount();
    throw obj;
  }
  case Fault::CppException:
    // throwException() will take care of deleting heap-allocated
    // exception object for us
    fault.m_cppException->throwException();
    NOT_REACHED();
  default:
    not_implemented();
  }
  NOT_REACHED();
}
// Re-enter the VM from C++ while a VM nesting is already active: saves
// the current VM state on m_nestedVMs, runs ar via enterVM, and restores
// the saved state even if enterVM throws.
void VMExecutionContext::reenterVM(TypedValue* retval,
                                   ActRec* ar,
                                   TypedValue* savedSP) {
  // ar is the base frame of the new nesting; it has no caller frame.
  ar->m_soff = 0;
  ar->m_savedRbp = 0;
  VMState savedVM = { getPC(), getFP(), m_firstAR, savedSP };
  TRACE(3, "savedVM: %p %p %p %p\n", m_pc, m_fp, m_firstAR, savedSP);
  pushVMState(savedVM, ar);
  assert(m_nestedVMs.size() >= 1);
  try {
    enterVM(retval, ar);
    popVMState();
  } catch (...) {
    // Pop the saved state on the error path too before propagating.
    popVMState();
    throw;
  }
  TRACE(1, "Reentry: exit fp %p pc %p\n", m_fp, m_pc);
}
// Switch VM execution mode (used by the debugger). When called while the
// JIT is inside a builtin, unwinds that builtin frame and pushes a null
// return value first. Always returns EXCEPTION_DEBUGGER so enterVM
// re-enters the loop on the same VM stack.
int VMExecutionContext::switchMode(bool unwindBuiltin) {
  if (unwindBuiltin) {
    // from Jit calling a builtin, should unwind a frame, and push a
    // return value on stack
    tx64->sync(); // just to set tl_regState
    unwindBuiltinFrame();
    m_stack.pushNull();
  }
  return EXCEPTION_DEBUGGER;
}
// Invoke f from C++ with the given arguments, storing the result in
// *retval. Handles instance/static/magic (__call) invocations, inherited
// VarEnvs, by-ref parameter checking, extra-arg packing, and merge-only
// pseudomains; enters (or re-enters) the VM to run the body.
void VMExecutionContext::invokeFunc(TypedValue* retval,
                                    const Func* f,
                                    CArrRef params,
                                    ObjectData* this_ /* = NULL */,
                                    Class* cls /* = NULL */,
                                    VarEnv* varEnv /* = NULL */,
                                    StringData* invName /* = NULL */,
                                    InvokeFlags flags /* = InvokeNormal */) {
  assert(retval);
  assert(f);
  // If this is a regular function, this_ and cls must be NULL
  assert(f->preClass() || f->isPseudoMain() || (!this_ && !cls));
  // If this is a method, either this_ or cls must be non-NULL
  assert(!f->preClass() || (this_ || cls));
  // If this is a static method, this_ must be NULL
  assert(!(f->attrs() & AttrStatic && !f->isClosureBody()) ||
         (!this_));
  // invName should only be non-NULL if we are calling __call or
  // __callStatic
  assert(!invName || f->name()->isame(s___call.get()) ||
         f->name()->isame(s___callStatic.get()));
  // If a variable environment is being inherited then params must be empty
  assert(!varEnv || params.empty());

  VMRegAnchor _;

  bool isMagicCall = (invName != nullptr);

  if (this_ != nullptr) {
    // The ActRec below takes a reference to $this.
    this_->incRefCount();
  }
  Cell* savedSP = m_stack.top();

  if (f->numParams() > kStackCheckReenterPadding - kNumActRecCells) {
    checkStack(m_stack, f);
  }

  if (flags & InvokePseudoMain) {
    assert(f->isPseudoMain() && !params.get());
    Unit* toMerge = f->unit();
    toMerge->merge();
    if (toMerge->isMergeOnly()) {
      // A merge-only unit's "execution" is entirely its merge; skip the VM.
      *retval = *toMerge->getMainReturn();
      return;
    }
  }

  ActRec* ar = m_stack.allocA();
  ar->m_soff = 0;
  ar->m_savedRbp = 0;
  ar->m_func = f;
  if (this_) {
    ar->setThis(this_);
  } else if (cls) {
    ar->setClass(cls);
  } else {
    ar->setThis(nullptr);
  }
  if (isMagicCall) {
    // Magic calls always receive exactly (name, packed-args).
    ar->initNumArgs(2);
  } else {
    ar->initNumArgs(params.size());
  }
  ar->setVarEnv(varEnv);

#ifdef HPHP_TRACE
  if (m_fp == nullptr) {
    TRACE(1, "Reentry: enter %s(%p) from top-level\n",
          f->name()->data(), ar);
  } else {
    TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
          f->name()->data(), m_pc, ar,
          m_fp->m_func ? m_fp->m_func->name()->data() : "unknownBuiltin", m_fp);
  }
#endif

  ArrayData *arr = params.get();
  if (isMagicCall) {
    // Put the method name into the location of the first parameter. We
    // are transferring ownership, so no need to incRef/decRef here.
    m_stack.pushStringNoRc(invName);
    // Put array of arguments into the location of the second parameter
    m_stack.pushArray(arr);
  } else if (arr) {
    const int numParams = f->numParams();
    const int numExtraArgs = arr->size() - numParams;
    ExtraArgs* extraArgs = nullptr;
    if (numExtraArgs > 0 && (f->attrs() & AttrMayUseVV)) {
      extraArgs = ExtraArgs::allocateUninit(numExtraArgs);
      ar->setExtraArgs(extraArgs);
    }
    int paramId = 0;
    for (ssize_t i = arr->iter_begin();
         i != ArrayData::invalid_index;
         i = arr->iter_advance(i), ++paramId) {
      TypedValue *from = arr->nvGetValueRef(i);
      TypedValue *to;
      if (LIKELY(paramId < numParams)) {
        to = m_stack.allocTV();
      } else {
        if (!(f->attrs() & AttrMayUseVV)) {
          // Discard extra arguments, since the function cannot
          // possibly use them.
          assert(extraArgs == nullptr);
          ar->setNumArgs(numParams);
          break;
        }
        assert(extraArgs != nullptr && numExtraArgs > 0);
        // VarEnv expects the extra args to be in "reverse" order
        // (i.e. the last extra arg has the lowest address)
        to = extraArgs->getExtraArg(paramId - numParams);
      }
      tvDup(from, to);
      if (LIKELY(!f->byRef(paramId))) {
        // By-value parameter: unwrap any ref from the source array.
        if (to->m_type == KindOfRef) {
          tvUnbox(to);
        }
      } else if (!(flags & InvokeIgnoreByRefErrors) &&
                 (from->m_type != KindOfRef ||
                  from->m_data.pref->_count == 2)) {
        // By-ref parameter that wasn't passed as a (shared) reference.
        raise_warning("Parameter %d to %s() expected to be "
                      "a reference, value given",
                      paramId + 1, f->fullName()->data());
        if (skipCufOnInvalidParams) {
          // Abort the call: tear down everything pushed so far and
          // return null instead of invoking f.
          if (extraArgs) {
            int n = paramId >= numParams ? paramId - numParams + 1 : 0;
            ExtraArgs::deallocate(extraArgs, n);
            ar->m_varEnv = nullptr;
            paramId -= n;
          }
          while (paramId >= 0) {
            m_stack.popTV();
            paramId--;
          }
          m_stack.popAR();
          tvWriteNull(retval);
          return;
        }
      }
    }
  }

  if (m_fp) {
    reenterVM(retval, ar, savedSP);
  } else {
    assert(m_nestedVMs.size() == 0);
    enterVM(retval, ar);
  }
}
2169 void VMExecutionContext::invokeFuncFew(TypedValue* retval,
2170 const Func* f,
2171 void* thisOrCls,
2172 StringData* invName,
2173 int argc, TypedValue* argv) {
2174 assert(retval);
2175 assert(f);
2176 // If this is a regular function, this_ and cls must be NULL
2177 assert(f->preClass() || !thisOrCls);
2178 // If this is a method, either this_ or cls must be non-NULL
2179 assert(!f->preClass() || thisOrCls);
2180 // If this is a static method, this_ must be NULL
2181 assert(!(f->attrs() & AttrStatic && !f->isClosureBody()) ||
2182 !ActRec::decodeThis(thisOrCls));
2183 // invName should only be non-NULL if we are calling __call or
2184 // __callStatic
2185 assert(!invName || f->name()->isame(s___call.get()) ||
2186 f->name()->isame(s___callStatic.get()));
2188 VMRegAnchor _;
2190 if (ObjectData* thiz = ActRec::decodeThis(thisOrCls)) {
2191 thiz->incRefCount();
2193 Cell* savedSP = m_stack.top();
2194 if (argc > kStackCheckReenterPadding - kNumActRecCells) {
2195 checkStack(m_stack, f);
2197 ActRec* ar = m_stack.allocA();
2198 ar->m_soff = 0;
2199 ar->m_savedRbp = 0;
2200 ar->m_func = f;
2201 ar->m_this = (ObjectData*)thisOrCls;
2202 ar->initNumArgs(argc);
2203 if (UNLIKELY(invName != nullptr)) {
2204 ar->setInvName(invName);
2205 } else {
2206 ar->m_varEnv = nullptr;
2209 #ifdef HPHP_TRACE
2210 if (m_fp == nullptr) {
2211 TRACE(1, "Reentry: enter %s(%p) from top-level\n",
2212 f->name()->data(), ar);
2213 } else {
2214 TRACE(1, "Reentry: enter %s(pc %p ar %p) from %s(%p)\n",
2215 f->name()->data(), m_pc, ar,
2216 m_fp->m_func ? m_fp->m_func->name()->data() : "unknownBuiltin", m_fp);
2218 #endif
2220 for (int i = 0; i < argc; i++) {
2221 *m_stack.allocTV() = *argv++;
2224 if (m_fp) {
2225 reenterVM(retval, ar, savedSP);
2226 } else {
2227 assert(m_nestedVMs.size() == 0);
2228 enterVM(retval, ar);
2232 void VMExecutionContext::invokeContFunc(const Func* f,
2233 ObjectData* this_,
2234 TypedValue* param /* = NULL */) {
2235 assert(f);
2236 assert(this_);
2238 EagerVMRegAnchor _;
2240 this_->incRefCount();
2242 Cell* savedSP = m_stack.top();
2244 // no need to check stack due to ReenterPadding
2245 assert(kStackCheckReenterPadding - kNumActRecCells >= 1);
2247 ActRec* ar = m_stack.allocA();
2248 ar->m_savedRbp = 0;
2249 ar->m_func = f;
2250 ar->m_soff = 0;
2251 ar->initNumArgs(param != nullptr ? 1 : 0);
2252 ar->setThis(this_);
2253 ar->setVarEnv(nullptr);
2255 if (param != nullptr) {
2256 tvDup(param, m_stack.allocTV());
2259 TypedValue retval;
2260 reenterVM(&retval, ar, savedSP);
2261 // Codegen for generator functions guarantees that they will return null
2262 assert(IS_NULL_TYPE(retval.m_type));
2265 void VMExecutionContext::invokeUnit(TypedValue* retval, Unit* unit) {
2266 Func* func = unit->getMain();
2267 invokeFunc(retval, func, null_array, nullptr, nullptr,
2268 m_globalVarEnv, nullptr, InvokePseudoMain);
2271 void VMExecutionContext::unwindBuiltinFrame() {
2272 // Unwind the frame for a builtin. Currently only used for
2273 // hphpd_break and fb_enable_code_coverage
2274 assert(m_fp->m_func->info());
2275 assert(m_fp->m_func->name()->isame(s_hphpd_break.get()) ||
2276 m_fp->m_func->name()->isame(s_fb_enable_code_coverage.get()));
2277 // Free any values that may be on the eval stack
2278 TypedValue *evalTop = (TypedValue*)getFP();
2279 while (m_stack.topTV() < evalTop) {
2280 m_stack.popTV();
2282 // Free the locals and VarEnv if there is one
2283 frame_free_locals_inl(m_fp, m_fp->m_func->numLocals());
2284 // Tear down the frame
2285 Offset pc = -1;
2286 ActRec* sfp = getPrevVMState(m_fp, &pc);
2287 assert(pc != -1);
2288 m_fp = sfp;
2289 m_pc = m_fp->m_func->unit()->at(pc);
2290 m_stack.discardAR();
2293 int VMExecutionContext::hhvmPrepareThrow() {
2294 Fault& fault = m_faults.back();
2295 tx64->sync();
2296 TRACE(2, "hhvmPrepareThrow: %p(\"%s\") {\n", m_fp,
2297 m_fp->m_func->name()->data());
2298 UnwindStatus unwindType;
2299 unwindType = m_stack.unwindFrame(m_fp, pcOff(),
2300 m_pc, fault);
2301 return handleUnwind(unwindType);
2305 * Given a pointer to a VM frame, returns the previous VM frame in the call
2306 * stack. This function will also pass back by reference the previous PC (if
2307 * prevPc is non-null) and the previous SP (if prevSp is non-null).
2309 * If there is no previous VM frame, this function returns NULL and does not
2310 * set prevPc and prevSp.
2312 ActRec* VMExecutionContext::getPrevVMState(const ActRec* fp,
2313 Offset* prevPc /* = NULL */,
2314 TypedValue** prevSp /* = NULL */,
2315 bool* fromVMEntry /* = NULL */) {
2316 if (fp == nullptr) {
2317 return nullptr;
2319 ActRec* prevFp = fp->arGetSfp();
2320 if (prevFp != fp) {
2321 if (prevSp) {
2322 if (UNLIKELY(fp->m_func->isGenerator())) {
2323 *prevSp = (TypedValue*)prevFp - prevFp->m_func->numSlotsInFrame();
2324 } else {
2325 *prevSp = (TypedValue*)&fp[1];
2328 if (prevPc) *prevPc = prevFp->m_func->base() + fp->m_soff;
2329 if (fromVMEntry) *fromVMEntry = false;
2330 return prevFp;
2332 // Linear search from end of m_nestedVMs. In practice, we're probably
2333 // looking for something recently pushed.
2334 int i = m_nestedVMs.size() - 1;
2335 for (; i >= 0; --i) {
2336 if (m_nestedVMs[i].m_entryFP == fp) break;
2338 if (i == -1) return nullptr;
2339 const VMState& vmstate = m_nestedVMs[i].m_savedState;
2340 prevFp = vmstate.fp;
2341 assert(prevFp);
2342 assert(prevFp->m_func->unit());
2343 if (prevSp) *prevSp = vmstate.sp;
2344 if (prevPc) *prevPc = prevFp->m_func->unit()->offsetOf(vmstate.pc);
2345 if (fromVMEntry) *fromVMEntry = true;
2346 return prevFp;
// Builds a PHP-style backtrace array for the current VM call stack.
// skip: omit the top VM frame; withSelf: also emit an entry for the top
// frame itself; withThis: include an 'object' element for frames with a
// $this; parserFrame: prepend a synthetic frame for code being parsed.
2349 Array VMExecutionContext::debugBacktrace(bool skip /* = false */,
2350 bool withSelf /* = false */,
2351 bool withThis /* = false */,
2352 VMParserFrame*
2353 parserFrame /* = NULL */) {
2354 Array bt = Array::Create();
2356 // If there is a parser frame, put it at the beginning of
2357 // the backtrace
2358 if (parserFrame) {
2359 bt.append(
2360 ArrayInit(2)
2361 .set(s_file, parserFrame->filename, true)
2362 .set(s_line, parserFrame->lineNumber, true)
2363 .toVariant()
2367 Transl::VMRegAnchor _;
2368 if (!getFP()) {
2369 // If there are no VM frames, we're done
2370 return bt;
2373 int depth = 0;
2374 ActRec* fp = nullptr;
2375 Offset pc = 0;
2377 // Get the fp and pc of the top frame (possibly skipping one frame)
2379 if (skip) {
2380 fp = getPrevVMState(getFP(), &pc);
2381 if (!fp) {
2382 // We skipped over the only VM frame, we're done
2383 return bt;
2385 } else {
2386 fp = getFP();
2387 Unit *unit = getFP()->m_func->unit();
2388 assert(unit);
2389 pc = unit->offsetOf(m_pc);
2392 // Handle the top frame
2393 if (withSelf) {
2394 // Builtins don't have a file and line number
2395 if (!fp->m_func->isBuiltin()) {
2396 Unit *unit = fp->m_func->unit();
2397 assert(unit);
2398 const char* filename = unit->filepath()->data();
// originalFilename() overrides the unit path (e.g. for evaled code).
2399 if (fp->m_func->originalFilename()) {
2400 filename = fp->m_func->originalFilename()->data();
2402 assert(filename);
2403 Offset off = pc;
2405 ArrayInit frame(parserFrame ? 4 : 2);
2406 frame.set(s_file, filename, true);
2407 frame.set(s_line, unit->getLineNumber(off), true);
2408 if (parserFrame) {
2409 frame.set(s_function, s_include, true);
2410 frame.set(s_args, Array::Create(parserFrame->filename), true);
2412 bt.append(frame.toVariant());
2413 depth++;
2418 // Handle the subsequent VM frames
// Walk caller frames with getPrevVMState; for each fp, prevFp supplies
// the call-site file/line information.
2419 Offset prevPc = 0;
2420 for (ActRec* prevFp = getPrevVMState(fp, &prevPc); fp != nullptr;
2421 fp = prevFp, pc = prevPc, prevFp = getPrevVMState(fp, &prevPc)) {
2422 // do not capture frame for HPHP only functions
2423 if (fp->m_func->isNoInjection()) {
2424 continue;
2427 ArrayInit frame(7);
2429 auto const curUnit = fp->m_func->unit();
2430 auto const curOp = *reinterpret_cast<const Opcode*>(curUnit->at(pc));
// A frame stopped on RetC/RetV has already torn down $this/args.
2431 auto const isReturning = curOp == OpRetC || curOp == OpRetV;
2433 // Builtins and generators don't have a file and line number
2434 if (prevFp && !prevFp->m_func->isBuiltin() && !fp->m_func->isGenerator()) {
2435 auto const prevUnit = prevFp->m_func->unit();
2436 auto prevFile = prevUnit->filepath();
2437 if (prevFp->m_func->originalFilename()) {
2438 prevFile = prevFp->m_func->originalFilename();
2440 assert(prevFile);
2441 frame.set(s_file, const_cast<StringData*>(prevFile), true);
2443 // In the normal method case, the "saved pc" for line number printing is
2444 // pointing at the cell conversion (Unbox/Pop) instruction, not the call
2445 // itself. For multi-line calls, this instruction is associated with the
2446 // subsequent line which results in an off-by-n. We're subtracting one
2447 // in order to look up the line associated with the FCall/FCallArray
2448 // instruction. Exception handling and the other opcodes (ex. BoxR)
2449 // already do the right thing. The emitter associates object access with
2450 // the subsequent expression and this would be difficult to modify.
2451 auto const opAtPrevPc =
2452 *reinterpret_cast<const Opcode*>(prevUnit->at(prevPc));
2453 Offset pcAdjust = 0;
2454 if (opAtPrevPc == OpPopR || opAtPrevPc == OpUnboxR) {
2455 pcAdjust = 1;
2457 frame.set(s_line,
2458 prevFp->m_func->unit()->getLineNumber(prevPc - pcAdjust),
2459 true);
2462 // check for include
2463 String funcname = const_cast<StringData*>(fp->m_func->name());
2464 if (fp->m_func->isGenerator()) {
2465 // retrieve the original function name from the inner continuation
2466 TypedValue* tv = frame_local(fp, 0);
2467 assert(tv->m_type == HPHP::KindOfObject);
2468 funcname = static_cast<c_Continuation*>(
2469 tv->m_data.pobj)->t_getorigfuncname();
2472 if (fp->m_func->isClosureBody()) {
2473 static StringData* s_closure_label =
2474 StringData::GetStaticString("{closure}");
2475 funcname = s_closure_label;
2478 // check for pseudomain
// A pseudo-main has an empty function name; report it as "include".
2479 if (funcname->empty()) {
2480 if (!prevFp) continue;
2481 funcname = s_include;
2484 frame.set(s_function, funcname, true);
2486 if (!funcname.same(s_include)) {
2487 // Closures have an m_this but they aren't in object context
2488 Class* ctx = arGetContextClass(fp);
2489 if (ctx != nullptr && !fp->m_func->isClosureBody()) {
2490 frame.set(s_class, ctx->name()->data(), true);
2491 if (fp->hasThis() && !isReturning) {
2492 if (withThis) {
2493 frame.set(s_object, Object(fp->getThis()), true);
2495 frame.set(s_type, "->", true);
2496 } else {
2497 frame.set(s_type, "::", true);
2502 Array args = Array::Create();
2503 if (funcname.same(s_include)) {
2504 if (depth) {
2505 args.append(const_cast<StringData*>(curUnit->filepath()));
2506 frame.set(s_args, args, true);
2508 } else if (!RuntimeOption::EnableArgsInBacktraces || isReturning) {
2509 // Provide an empty 'args' array to be consistent with hphpc
2510 frame.set(s_args, args, true);
2511 } else {
2512 int nparams = fp->m_func->numParams();
2513 int nargs = fp->numArgs();
2514 /* builtin extra args are not stored in varenv */
// NOTE(review): when nargs > nparams, the overflow args are read from
// the frame's ExtraArgs storage — assumes getExtraArg is valid here.
2515 if (nargs <= nparams) {
2516 for (int i = 0; i < nargs; i++) {
2517 TypedValue *arg = frame_local(fp, i);
2518 args.append(tvAsVariant(arg));
2520 } else {
2521 int i;
2522 for (i = 0; i < nparams; i++) {
2523 TypedValue *arg = frame_local(fp, i);
2524 args.append(tvAsVariant(arg));
2526 for (; i < nargs; i++) {
2527 TypedValue *arg = fp->getExtraArg(i - nparams);
2528 args.append(tvAsVariant(arg));
2531 frame.set(s_args, args, true);
2534 bt.append(frame.toVariant());
2535 depth++;
2537 return bt;
2540 MethodInfoVM::~MethodInfoVM() {
2541 for (std::vector<const ClassInfo::ParameterInfo*>::iterator it =
2542 parameters.begin(); it != parameters.end(); ++it) {
2543 if ((*it)->value != nullptr) {
2544 free((void*)(*it)->value);
2549 ClassInfoVM::~ClassInfoVM() {
2550 destroyMembers(m_methodsVec);
2551 destroyMapValues(m_properties);
2552 destroyMapValues(m_constants);
2555 Array VMExecutionContext::getUserFunctionsInfo() {
2556 // Return an array of all user-defined function names. This method is used to
2557 // support get_defined_functions().
2558 return Unit::getUserFunctions();
2561 Array VMExecutionContext::getConstantsInfo() {
2562 // Return an array of all defined constant:value pairs. This method is used
2563 // to support get_defined_constants().
2564 return Array::Create();
2567 const ClassInfo::MethodInfo* VMExecutionContext::findFunctionInfo(
2568 CStrRef name) {
2569 StringIMap<AtomicSmartPtr<MethodInfoVM> >::iterator it =
2570 m_functionInfos.find(name);
2571 if (it == m_functionInfos.end()) {
2572 Func* func = Unit::loadFunc(name.get());
2573 if (func == nullptr || func->builtinFuncPtr()) {
2574 return nullptr;
2576 AtomicSmartPtr<MethodInfoVM> &m = m_functionInfos[name];
2577 m = new MethodInfoVM();
2578 func->getFuncInfo(m.get());
2579 return m.get();
2580 } else {
2581 return it->second.get();
2585 const ClassInfo* VMExecutionContext::findClassInfo(CStrRef name) {
2586 if (name->empty()) return nullptr;
2587 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2588 m_classInfos.find(name);
2589 if (it == m_classInfos.end()) {
2590 Class* cls = Unit::lookupClass(name.get());
2591 if (cls == nullptr) return nullptr;
2592 if (cls->clsInfo()) return cls->clsInfo();
2593 if (cls->attrs() & (AttrInterface | AttrTrait)) {
2594 // If the specified name matches with something that is not formally
2595 // a class, return NULL
2596 return nullptr;
2598 AtomicSmartPtr<ClassInfoVM> &c = m_classInfos[name];
2599 c = new ClassInfoVM();
2600 cls->getClassInfo(c.get());
2601 return c.get();
2602 } else {
2603 return it->second.get();
2607 const ClassInfo* VMExecutionContext::findInterfaceInfo(CStrRef name) {
2608 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2609 m_interfaceInfos.find(name);
2610 if (it == m_interfaceInfos.end()) {
2611 Class* cls = Unit::lookupClass(name.get());
2612 if (cls == nullptr) return nullptr;
2613 if (cls->clsInfo()) return cls->clsInfo();
2614 if (!(cls->attrs() & AttrInterface)) {
2615 // If the specified name matches with something that is not formally
2616 // an interface, return NULL
2617 return nullptr;
2619 AtomicSmartPtr<ClassInfoVM> &c = m_interfaceInfos[name];
2620 c = new ClassInfoVM();
2621 cls->getClassInfo(c.get());
2622 return c.get();
2623 } else {
2624 return it->second.get();
2628 const ClassInfo* VMExecutionContext::findTraitInfo(CStrRef name) {
2629 StringIMap<AtomicSmartPtr<ClassInfoVM> >::iterator it =
2630 m_traitInfos.find(name);
2631 if (it != m_traitInfos.end()) {
2632 return it->second.get();
2634 Class* cls = Unit::lookupClass(name.get());
2635 if (cls == nullptr) return nullptr;
2636 if (cls->clsInfo()) return cls->clsInfo();
2637 if (!(cls->attrs() & AttrTrait)) {
2638 return nullptr;
2640 AtomicSmartPtr<ClassInfoVM> &classInfo = m_traitInfos[name];
2641 classInfo = new ClassInfoVM();
2642 cls->getClassInfo(classInfo.get());
2643 return classInfo.get();
2646 const ClassInfo::ConstantInfo* VMExecutionContext::findConstantInfo(
2647 CStrRef name) {
2648 TypedValue* tv = Unit::lookupCns(name.get());
2649 if (tv == nullptr) {
2650 return nullptr;
2652 ConstInfoMap::const_iterator it = m_constInfo.find(name.get());
2653 if (it != m_constInfo.end()) {
2654 return it->second;
2656 StringData* key = StringData::GetStaticString(name.get());
2657 ClassInfo::ConstantInfo* ci = new ClassInfo::ConstantInfo();
2658 ci->name = *(const String*)&key;
2659 ci->valueLen = 0;
2660 ci->valueText = "";
2661 ci->setValue(tvAsCVarRef(tv));
2662 m_constInfo[key] = ci;
2663 return ci;
// Resolve `path' (relative to `currentDir') to a PhpFile, reusing the
// per-request cache in m_evaledFiles and falling back to realpath and
// then to a fresh parse via FileRepository.  *initial_opt, when given,
// reports whether this is the first load of the file in this request.
2666 HPHP::Eval::PhpFile* VMExecutionContext::lookupPhpFile(StringData* path,
2667 const char* currentDir,
2668 bool* initial_opt) {
2669 bool init;
2670 bool &initial = initial_opt ? *initial_opt : init;
2671 initial = true;
2673 struct stat s;
2674 String spath = Eval::resolveVmInclude(path, currentDir, &s);
2675 if (spath.isNull()) return nullptr;
2677 // Check if this file has already been included.
2678 EvaledFilesMap::const_iterator it = m_evaledFiles.find(spath.get());
2679 HPHP::Eval::PhpFile* efile = nullptr;
2680 if (it != m_evaledFiles.end()) {
2681 // We found it! Return the unit.
2682 efile = it->second;
2683 initial = false;
// NOTE(review): callers that don't pass initial_opt receive an extra
// reference they appear to own — confirm against call sites.
2684 if (!initial_opt) efile->incRef();
2685 return efile;
2687 // We didn't find it, so try the realpath.
2688 bool alreadyResolved =
2689 RuntimeOption::RepoAuthoritative ||
2690 (!RuntimeOption::CheckSymLink && (spath[0] == '/'));
2691 bool hasRealpath = false;
2692 String rpath;
2693 if (!alreadyResolved) {
2694 std::string rp = StatCache::realpath(spath.data());
2695 if (rp.size() != 0) {
2696 rpath = NEW(StringData)(rp.data(), rp.size(), CopyString);
2697 if (!rpath.same(spath)) {
2698 hasRealpath = true;
2699 it = m_evaledFiles.find(rpath.get());
2700 if (it != m_evaledFiles.end()) {
2701 // We found it! Update the mapping for spath and
2702 // return the unit.
2703 efile = it->second;
// The map takes a reference on both the key and the file.
2704 m_evaledFiles[spath.get()] = efile;
2705 spath.get()->incRefCount();
2706 efile->incRef();
2707 initial = false;
2708 if (!initial_opt) efile->incRef();
2709 return efile;
2714 // This file hasn't been included yet, so we need to parse the file
2715 efile = HPHP::Eval::FileRepository::checkoutFile(
2716 hasRealpath ? rpath.get() : spath.get(), s);
2717 assert(!efile || efile->getRef() > 0);
2718 if (efile && initial_opt) {
2719 // if initial_opt is not set, this shouldnt be recorded as a
2720 // per request fetch of the file.
2721 if (Transl::TargetCache::testAndSetBit(efile->getId())) {
2722 initial = false;
2724 // if parsing was successful, update the mappings for spath and
2725 // rpath (if it exists).
2726 m_evaledFiles[spath.get()] = efile;
2727 spath.get()->incRefCount();
2728 // Don't incRef efile; checkoutFile() already counted it.
2729 if (hasRealpath) {
2730 m_evaledFiles[rpath.get()] = efile;
2731 rpath.get()->incRefCount();
2732 efile->incRef();
2734 DEBUGGER_ATTACHED_ONLY(phpDebuggerFileLoadHook(efile));
2736 return efile;
2739 Unit* VMExecutionContext::evalInclude(StringData* path,
2740 const StringData* curUnitFilePath,
2741 bool* initial) {
2742 namespace fs = boost::filesystem;
2743 HPHP::Eval::PhpFile* efile = nullptr;
2744 if (curUnitFilePath) {
2745 fs::path currentUnit(curUnitFilePath->data());
2746 fs::path currentDir(currentUnit.branch_path());
2747 efile = lookupPhpFile(path, currentDir.string().c_str(), initial);
2748 } else {
2749 efile = lookupPhpFile(path, "", initial);
2751 if (efile) {
2752 return efile->unit();
2754 return nullptr;
2757 HPHP::Unit* VMExecutionContext::evalIncludeRoot(
2758 StringData* path, InclOpFlags flags, bool* initial) {
2759 HPHP::Eval::PhpFile* efile = lookupIncludeRoot(path, flags, initial);
2760 return efile ? efile->unit() : 0;
2763 HPHP::Eval::PhpFile* VMExecutionContext::lookupIncludeRoot(StringData* path,
2764 InclOpFlags flags,
2765 bool* initial,
2766 Unit* unit) {
2767 String absPath;
2768 if ((flags & InclOpRelative)) {
2769 namespace fs = boost::filesystem;
2770 if (!unit) unit = getFP()->m_func->unit();
2771 fs::path currentUnit(unit->filepath()->data());
2772 fs::path currentDir(currentUnit.branch_path());
2773 absPath = currentDir.string() + '/';
2774 TRACE(2, "lookupIncludeRoot(%s): relative -> %s\n",
2775 path->data(),
2776 absPath->data());
2777 } else {
2778 assert(flags & InclOpDocRoot);
2779 absPath = SourceRootInfo::GetCurrentPhpRoot();
2780 TRACE(2, "lookupIncludeRoot(%s): docRoot -> %s\n",
2781 path->data(),
2782 absPath->data());
2785 absPath += StrNR(path);
2787 EvaledFilesMap::const_iterator it = m_evaledFiles.find(absPath.get());
2788 if (it != m_evaledFiles.end()) {
2789 if (initial) *initial = false;
2790 if (!initial) it->second->incRef();
2791 return it->second;
2794 return lookupPhpFile(absPath.get(), "", initial);
2798 Instantiate hoistable classes and functions.
2799 If there is any more work left to do, setup a
2800 new frame ready to execute the pseudomain.
2802 return true iff the pseudomain needs to be executed.
2804 bool VMExecutionContext::evalUnit(Unit* unit, bool local,
2805 PC& pc, int funcType) {
2806 m_pc = pc;
2807 unit->merge();
// Merge-only units have no executable pseudo-main work left: push the
// precomputed return value and skip execution entirely.
2808 if (unit->isMergeOnly()) {
2809 Stats::inc(Stats::PseudoMain_Skipped);
2810 *m_stack.allocTV() = *unit->getMainReturn();
2811 return false;
2813 Stats::inc(Stats::PseudoMain_Executed);
// Build an activation record for the pseudo-main.  `local' means it
// runs without inheriting the caller's $this/class/VarEnv.
2816 ActRec* ar = m_stack.allocA();
2817 assert((uintptr_t)&ar->m_func < (uintptr_t)&ar->m_r);
2818 Class* cls = curClass();
2819 if (local) {
2820 cls = nullptr;
2821 ar->setThis(nullptr);
2822 } else if (m_fp->hasThis()) {
2823 ObjectData *this_ = m_fp->getThis();
2824 this_->incRefCount();
2825 ar->setThis(this_);
2826 } else if (m_fp->hasClass()) {
2827 ar->setClass(m_fp->getClass());
2828 } else {
2829 ar->setThis(nullptr);
2831 Func* func = unit->getMain(cls);
2832 assert(!func->info());
2833 assert(!func->isGenerator());
2834 ar->m_func = func;
2835 ar->initNumArgs(0);
2836 assert(getFP());
2837 assert(!m_fp->hasInvName());
2838 arSetSfp(ar, m_fp);
// Record the return offset into the caller and a return helper so the
// frame unwinds through the interpreter, not the TC.
2839 ar->m_soff = uintptr_t(m_fp->m_func->unit()->offsetOf(pc) -
2840 m_fp->m_func->base());
2841 ar->m_savedRip = (uintptr_t)tx64->getRetFromInterpretedFrame();
2842 assert(isReturnHelper(ar->m_savedRip));
2843 pushLocalsAndIterators(func);
2844 if (local) {
2845 ar->m_varEnv = 0;
2846 } else {
// Non-local evals share (and lazily create) the caller's VarEnv.
2847 if (!m_fp->hasVarEnv()) {
2848 m_fp->m_varEnv = VarEnv::createLazyAttach(m_fp);
2850 ar->m_varEnv = m_fp->m_varEnv;
2851 ar->m_varEnv->attach(ar);
2853 m_fp = ar;
2854 pc = func->getEntry();
2855 SYNC();
2856 bool ret = EventHook::FunctionEnter(m_fp, funcType);
2857 pc = m_pc;
2858 return ret;
2861 CVarRef VMExecutionContext::getEvaledArg(const StringData* val) {
2862 CStrRef key = *(String*)&val;
2864 if (m_evaledArgs.get()) {
2865 CVarRef arg = m_evaledArgs.get()->get(key);
2866 if (&arg != &null_variant) return arg;
2868 String code = HPHP::concat3("<?php return ", key, ";");
2869 Unit* unit = compileEvalString(code.get());
2870 assert(unit != nullptr);
2871 Variant v;
2872 // Default arg values are not currently allowed to depend on class context.
2873 g_vmContext->invokeFunc((TypedValue*)&v, unit->getMain(),
2874 null_array, nullptr, nullptr, nullptr, nullptr,
2875 InvokePseudoMain);
2876 Variant &lv = m_evaledArgs.lvalAt(key, AccessFlags::Key);
2877 lv = v;
2878 return lv;
2882 * Helper for function entry, including pseudo-main entry.
2884 void
2885 VMExecutionContext::pushLocalsAndIterators(const Func* func,
2886 int nparams /*= 0*/) {
2887 // Push locals.
2888 for (int i = nparams; i < func->numLocals(); i++) {
2889 m_stack.pushUninit();
2891 // Push iterators.
2892 for (int i = 0; i < func->numIterators(); i++) {
2893 m_stack.allocI();
2897 void VMExecutionContext::enqueueSharedVar(SharedVariant* svar) {
2898 m_freedSvars.push_back(svar);
2901 class FreedSVars : public Treadmill::WorkItem {
2902 SVarVector m_svars;
2903 public:
2904 explicit FreedSVars(SVarVector&& svars) : m_svars(std::move(svars)) {}
2905 virtual void operator()() {
2906 for (auto it = m_svars.begin(); it != m_svars.end(); it++) {
2907 delete *it;
2912 void VMExecutionContext::treadmillSharedVars() {
2913 Treadmill::WorkItem::enqueue(new FreedSVars(std::move(m_freedSvars)));
2916 void VMExecutionContext::destructObjects() {
2917 if (UNLIKELY(RuntimeOption::EnableObjDestructCall)) {
2918 while (!m_liveBCObjs.empty()) {
2919 ObjectData* o = *m_liveBCObjs.begin();
2920 Instance* instance = static_cast<Instance*>(o);
2921 instance->destruct(); // Let the instance remove the node.
2923 m_liveBCObjs.clear();
2927 // Evaled units have a footprint in the TC and translation metadata. The
2928 // applications we care about tend to have few, short, stereotyped evals,
2929 // where the same code keeps getting eval'ed over and over again; so we
2930 // keep around units for each eval'ed string, so that the TC space isn't
2931 // wasted on each eval.
2932 typedef RankedCHM<StringData*, HPHP::Unit*,
2933 StringDataHashCompare,
2934 RankEvaledUnits> EvaledUnitsMap;
2935 static EvaledUnitsMap s_evaledUnits;
2936 Unit* VMExecutionContext::compileEvalString(StringData* code) {
2937 EvaledUnitsMap::accessor acc;
2938 // Promote this to a static string; otherwise it may get swept
2939 // across requests.
2940 code = StringData::GetStaticString(code);
2941 if (s_evaledUnits.insert(acc, code)) {
2942 acc->second = compile_string(code->data(), code->size());
2944 return acc->second;
2947 CStrRef VMExecutionContext::createFunction(CStrRef args, CStrRef code) {
2948 VMRegAnchor _;
2949 // It doesn't matter if there's a user function named __lambda_func; we only
2950 // use this name during parsing, and then change it to an impossible name
2951 // with a NUL byte before we merge it into the request's func map. This also
2952 // has the bonus feature that the value of __FUNCTION__ inside the created
2953 // function will match Zend. (Note: Zend will actually fatal if there's a
2954 // user function named __lambda_func when you call create_function. Huzzah!)
2955 static StringData* oldName = StringData::GetStaticString("__lambda_func");
2956 std::ostringstream codeStr;
2957 codeStr << "<?php function " << oldName->data()
2958 << "(" << args.data() << ") {"
2959 << code.data() << "}\n";
2960 StringData* evalCode = StringData::GetStaticString(codeStr.str());
2961 Unit* unit = compile_string(evalCode->data(), evalCode->size());
2962 // Move the function to a different name.
2963 std::ostringstream newNameStr;
2964 newNameStr << '\0' << "lambda_" << ++m_lambdaCounter;
2965 StringData* newName = StringData::GetStaticString(newNameStr.str());
2966 unit->renameFunc(oldName, newName);
2967 m_createdFuncs.push_back(unit);
2968 unit->merge();
2970 // Technically we shouldn't have to eval the unit right now (it'll execute
2971 // the pseudo-main, which should be empty) and could get away with just
2972 // mergeFuncs. However, Zend does it this way, as proven by the fact that you
2973 // can inject code into the evaled unit's pseudo-main:
2975 // create_function('', '} echo "hi"; if (0) {');
2977 // We have to eval now to emulate this behavior.
2978 TypedValue retval;
2979 invokeFunc(&retval, unit->getMain(), null_array,
2980 nullptr, nullptr, nullptr, nullptr,
2981 InvokePseudoMain);
2983 // __lambda_func will be the only hoistable function.
2984 // Any functions or closures defined in it will not be hoistable.
2985 Func* lambda = unit->firstHoistable();
2986 return lambda->nameRef();
// Evaluate debugger-supplied PHP source in the context of the VM frame
// `frame' levels up the stack, writing the result to `retval'.  All
// failure modes (fatals, exit, C++/PHP exceptions) are reported through
// the debugger output channel rather than propagated (DebuggerException
// excepted, which is rethrown).
2989 void VMExecutionContext::evalPHPDebugger(TypedValue* retval, StringData *code,
2990 int frame) {
2991 assert(retval);
2992 // The code has "<?php" prepended already
2993 Unit* unit = compileEvalString(code);
2994 if (unit == nullptr) {
2995 raise_error("Syntax error");
2996 tvWriteNull(retval);
2997 return;
// Walk up `frame' VM frames, tracking the matching position in the
// VarEnv chain so a lazily created VarEnv can be spliced in order.
3000 VarEnv *varEnv = nullptr;
3001 ActRec *fp = getFP();
3002 ActRec *cfpSave = nullptr;
3003 if (fp) {
3004 VarEnv* vit = nullptr;
3005 for (; frame > 0; --frame) {
3006 if (fp->hasVarEnv()) {
3007 if (!vit) {
3008 vit = m_topVarEnv;
3009 } else if (vit != fp->m_varEnv) {
3010 vit = vit->previous();
3012 assert(vit == fp->m_varEnv);
3014 ActRec* prevFp = getPrevVMState(fp);
3015 if (!prevFp) {
3016 // To be safe in case we failed to get prevFp. This would mean we've
3017 // been asked to eval in a frame which is beyond the top of the stack.
3018 // This suggests the debugger client has made an error.
3019 break;
3021 fp = prevFp;
3023 if (!fp->hasVarEnv()) {
3024 if (!vit) {
3025 fp->m_varEnv = VarEnv::createLazyAttach(fp);
3026 } else {
3027 const bool skipInsert = true;
3028 fp->m_varEnv = VarEnv::createLazyAttach(fp, skipInsert);
3029 // Slide it in front of the VarEnv most recently above it.
3030 fp->m_varEnv->setPrevious(vit->previous());
3031 vit->setPrevious(fp->m_varEnv);
3034 varEnv = fp->m_varEnv;
// Remember the VarEnv's current frame so it can be restored after the
// eval completes (see the cleanup at the bottom).
3035 cfpSave = varEnv->getCfp();
3037 ObjectData *this_ = nullptr;
3038 // NB: the ActRec and function within the AR may have different classes. The
3039 // class in the ActRec is the type used when invoking the function (i.e.,
3040 // Derived in Derived::Foo()) while the class obtained from the function is
3041 // the type that declared the function Foo, which may be Base. We need both
3042 // the class to match any object that this function may have been invoked on,
3043 // and we need the class from the function execution is stopped in.
3044 Class *frameClass = nullptr;
3045 Class *functionClass = nullptr;
3046 if (fp) {
3047 if (fp->hasThis()) {
3048 this_ = fp->getThis();
3049 } else if (fp->hasClass()) {
3050 frameClass = fp->getClass();
3052 functionClass = fp->m_func->cls();
3053 phpDebuggerEvalHook(fp->m_func);
3056 const static StaticString s_cppException("Hit an exception");
3057 const static StaticString s_phpException("Hit a php exception");
3058 const static StaticString s_exit("Hit exit");
3059 const static StaticString s_fatal("Hit fatal");
3060 try {
3061 // Invoke the given PHP, possibly specialized to match the type of the
3062 // current function on the stack, optionally passing a this pointer or
3063 // class used to execute the current function.
3064 invokeFunc(retval, unit->getMain(functionClass), null_array,
3065 this_, frameClass, varEnv, nullptr, InvokePseudoMain);
3066 } catch (FatalErrorException &e) {
3067 g_vmContext->write(s_fatal);
3068 g_vmContext->write(" : ");
3069 g_vmContext->write(e.getMessage().c_str());
3070 g_vmContext->write("\n");
3071 g_vmContext->write(ExtendedLogger::StringOfStackTrace(e.getBackTrace()));
3072 } catch (ExitException &e) {
3073 g_vmContext->write(s_exit.data());
3074 g_vmContext->write(" : ");
3075 std::ostringstream os;
3076 os << ExitException::ExitCode;
3077 g_vmContext->write(os.str());
3078 } catch (Eval::DebuggerException &e) {
// Debugger exceptions propagate to the caller, but the VarEnv's cfp
// must still be restored first.
3079 if (varEnv) {
3080 varEnv->setCfp(cfpSave);
3082 throw;
3083 } catch (Exception &e) {
3084 g_vmContext->write(s_cppException.data());
3085 g_vmContext->write(" : ");
3086 g_vmContext->write(e.getMessage().c_str());
3087 ExtendedException* ee = dynamic_cast<ExtendedException*>(&e);
3088 if (ee) {
3089 g_vmContext->write("\n");
3090 g_vmContext->write(
3091 ExtendedLogger::StringOfStackTrace(ee->getBackTrace()));
3093 } catch (Object &e) {
3094 g_vmContext->write(s_phpException.data());
3095 g_vmContext->write(" : ");
3096 g_vmContext->write(e->t___tostring().data());
3097 } catch (...) {
3098 g_vmContext->write(s_cppException.data());
3101 if (varEnv) {
3102 // The debugger eval frame may have attached to the VarEnv from a
3103 // frame that was not the top frame, so we need to manually set
3104 // cfp back to what it was before
3105 varEnv->setCfp(cfpSave);
// Install a synthetic "dummy" frame so the debugger can evaluate code even
// when the VM stack is empty. The dummy unit is an empty PHP source compiled
// once and cached in a function-local static.
3109 void VMExecutionContext::enterDebuggerDummyEnv() {
3110 static Unit* s_debuggerDummy = nullptr;
3111 if (!s_debuggerDummy) {
3112 s_debuggerDummy = compile_string("<?php?>", 7);
3114 VarEnv* varEnv = m_topVarEnv;
// Only fabricate a frame when no frame pointer exists; otherwise we just
// attach the VarEnv to the current frame below.
3115 if (!getFP()) {
3116 assert(m_stack.count() == 0);
3117 ActRec* ar = m_stack.allocA();
3118 ar->m_func = s_debuggerDummy->getMain();
3119 ar->setThis(nullptr);
3120 ar->m_soff = 0;
3121 ar->m_savedRbp = 0;
// Returning from the dummy frame lands in the translator's call-to-exit stub.
3122 ar->m_savedRip = (uintptr_t)tx64->getCallToExit();
3123 assert(isReturnHelper(ar->m_savedRip));
3124 m_fp = ar;
3125 m_pc = s_debuggerDummy->entry();
3126 m_firstAR = ar;
// Attach the current top VarEnv to the (possibly just-created) frame.
3128 m_fp->setVarEnv(varEnv);
3129 varEnv->attach(m_fp);
// Tear down the dummy debugger frame: detach the global VarEnv from the
// current frame. Asserts document the invariant that the top VarEnv here is
// exactly the global one.
3132 void VMExecutionContext::exitDebuggerDummyEnv() {
3133 assert(m_topVarEnv);
3134 assert(m_globalVarEnv == m_topVarEnv);
3135 m_globalVarEnv->detach(getFP());
3138 // Identifies the set of return helpers that we may set m_savedRip to in an
3139 // ActRec.
// Returns true iff `address` is one of the three translator-provided return
// stubs (interpreted frame, interpreted generator frame, or call-to-exit).
3140 bool VMExecutionContext::isReturnHelper(uintptr_t address) {
3141 return ((address == (uintptr_t)tx64->getRetFromInterpretedFrame()) ||
3142 (address == (uintptr_t)tx64->getRetFromInterpretedGeneratorFrame()) ||
3143 (address == (uintptr_t)tx64->getCallToExit()));
3146 // Walk the stack and find any return address to jitted code and bash it to
3147 // the appropriate RetFromInterpreted*Frame helper. This ensures that we don't
3148 // return into jitted code and gives the system the proper chance to interpret
3149 // blacklisted tracelets.
3150 void VMExecutionContext::preventReturnsToTC() {
3151 assert(isDebuggerAttached());
3152 if (RuntimeOption::EvalJit) {
3153 ActRec *ar = getFP();
// Walk every frame up the VM call chain.
3154 while (ar) {
// Only rewrite addresses that currently point into the translation cache
// and are not already one of the return helpers.
3155 if (!isReturnHelper(ar->m_savedRip) &&
3156 (tx64->isValidCodeAddress((TCA)ar->m_savedRip))) {
3157 TRACE_RB(2, "Replace RIP in fp %p, savedRip 0x%lx, "
3158 "func %s\n", ar, ar->m_savedRip,
3159 ar->m_func->fullName()->data());
// Generator frames return through a distinct helper.
3160 if (ar->m_func->isGenerator()) {
3161 ar->m_savedRip =
3162 (uintptr_t)tx64->getRetFromInterpretedGeneratorFrame();
3163 } else {
3164 ar->m_savedRip =
3165 (uintptr_t)tx64->getRetFromInterpretedFrame();
3167 assert(isReturnHelper(ar->m_savedRip));
3169 ar = getPrevVMState(ar);
// Normalize a TypedValue key into a StringData* variable name.
3174 static inline StringData* lookup_name(TypedValue* key) {
3175 return prepareKey(key);
// Resolve a (possibly dynamic) local variable name in frame `fp`.
// Outputs: `name` (caller must decRef) and `val` — a pointer to the local
// slot, a VarEnv entry, or nullptr if the variable does not exist.
3178 static inline void lookup_var(ActRec* fp,
3179 StringData*& name,
3180 TypedValue* key,
3181 TypedValue*& val) {
3182 name = lookup_name(key);
3183 const Func* func = fp->m_func;
// Fast path: the name is a compile-time-known local of this function.
3184 Id id = func->lookupVarId(name);
3185 if (id != kInvalidId) {
3186 val = frame_local(fp, id);
3187 } else {
3188 assert(!fp->hasInvName());
// Fall back to the frame's VarEnv, if one has been materialized.
3189 if (fp->hasVarEnv()) {
3190 val = fp->m_varEnv->lookup(name);
3191 } else {
3192 val = nullptr;
// Like lookup_var, but defines the variable (as null) if it does not exist,
// lazily creating and attaching a VarEnv to the frame when needed.
3197 static inline void lookupd_var(ActRec* fp,
3198 StringData*& name,
3199 TypedValue* key,
3200 TypedValue*& val) {
3201 name = lookup_name(key);
3202 const Func* func = fp->m_func;
3203 Id id = func->lookupVarId(name);
3204 if (id != kInvalidId) {
3205 val = frame_local(fp, id);
3206 } else {
3207 assert(!fp->hasInvName());
// Materialize a VarEnv on demand so the new variable has somewhere to live.
3208 if (!fp->hasVarEnv()) {
3209 fp->m_varEnv = VarEnv::createLazyAttach(fp);
3211 val = fp->m_varEnv->lookup(name);
// Define-on-miss: set a null value, then re-lookup to get its address.
3212 if (val == nullptr) {
3213 TypedValue tv;
3214 tvWriteNull(&tv);
3215 fp->m_varEnv->set(name, &tv);
3216 val = fp->m_varEnv->lookup(name);
// Resolve a global variable by name; `val` is nullptr if it does not exist.
// `name` must be decRef'd by the caller.
3221 static inline void lookup_gbl(ActRec* fp,
3222 StringData*& name,
3223 TypedValue* key,
3224 TypedValue*& val) {
3225 name = lookup_name(key);
3226 assert(g_vmContext->m_globalVarEnv);
3227 val = g_vmContext->m_globalVarEnv->lookup(name);
// Like lookup_gbl, but defines the global (as null) if it does not exist.
3230 static inline void lookupd_gbl(ActRec* fp,
3231 StringData*& name,
3232 TypedValue* key,
3233 TypedValue*& val) {
3234 name = lookup_name(key);
3235 assert(g_vmContext->m_globalVarEnv);
3236 VarEnv* varEnv = g_vmContext->m_globalVarEnv;
3237 val = varEnv->lookup(name);
// Define-on-miss, mirroring lookupd_var.
3238 if (val == nullptr) {
3239 TypedValue tv;
3240 tvWriteNull(&tv);
3241 varEnv->set(name, &tv);
3242 val = varEnv->lookup(name);
// Resolve a static property on the class referenced by `clsRef` from the
// context of frame `fp`. Outputs the property slot plus visibility and
// accessibility flags; `name` must be decRef'd by the caller.
3246 static inline void lookup_sprop(ActRec* fp,
3247 TypedValue* clsRef,
3248 StringData*& name,
3249 TypedValue* key,
3250 TypedValue*& val,
3251 bool& visible,
3252 bool& accessible) {
3253 assert(clsRef->m_type == KindOfClass);
3254 name = lookup_name(key);
// Access checks are performed relative to the calling frame's class context.
3255 Class* ctx = arGetContextClass(fp);
3256 val = clsRef->m_data.pcls->getSProp(ctx, name, visible, accessible);
// Convert `input` (a class-name string or an object) into a KindOfClass
// output cell. Raises a fatal error for unknown classes or invalid input
// types; optionally decrefs the input.
3259 static inline void lookupClsRef(TypedValue* input,
3260 TypedValue* output,
3261 bool decRef = false) {
3262 const Class* class_ = nullptr;
3263 if (IS_STRING_TYPE(input->m_type)) {
3264 class_ = Unit::loadClass(input->m_data.pstr);
3265 if (class_ == nullptr) {
// Keep output well-formed before raising; raise_error does not return.
3266 output->m_type = KindOfNull;
3267 raise_error(Strings::UNKNOWN_CLASS, input->m_data.pstr->data());
3269 } else if (input->m_type == KindOfObject) {
3270 class_ = input->m_data.pobj->getVMClass();
3271 } else {
3272 output->m_type = KindOfNull;
3273 raise_error("Cls: Expected string or object");
3275 if (decRef) {
3276 tvRefcountedDecRef(input);
3278 output->m_data.pcls = const_cast<Class*>(class_);
3279 output->m_type = KindOfClass;
// Debug/trace helper: the refcount of a refcounted value, or -1 otherwise.
3282 static UNUSED int innerCount(const TypedValue* tv) {
3283 if (IS_REFCOUNTED_TYPE(tv->m_type)) {
3284 // We're using pref here arbitrarily; any refcounted union member works.
3285 return tv->m_data.pref->_count;
3287 return -1;
// Rotate the two temporary reference slots used during member-vector
// evaluation so that a reference acquired in this iteration survives exactly
// one more iteration. The statement order here is load-bearing; see the
// inline comments below.
3290 static inline void ratchetRefs(TypedValue*& result, TypedValue& tvRef,
3291 TypedValue& tvRef2) {
3292 TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
3293 result, result->m_type, innerCount(result),
3294 &tvRef, tvRef.m_type, innerCount(&tvRef),
3295 &tvRef2, tvRef2.m_type, innerCount(&tvRef2));
3296 // Due to complications associated with ArrayAccess, it is possible to acquire
3297 // a reference as a side effect of vector operation processing. Such a
3298 // reference must be retained until after the next iteration is complete.
3299 // Therefore, move the reference from tvRef to tvRef2, so that the reference
3300 // will be released one iteration later. But only do this if tvRef was used in
3301 // this iteration, otherwise we may wipe out the last reference to something
3302 // that we need to stay alive until the next iteration.
3303 if (tvRef.m_type != KindOfUninit) {
3304 if (IS_REFCOUNTED_TYPE(tvRef2.m_type)) {
3305 tvDecRef(&tvRef2);
3306 TRACE(5, "Ratchet: decref tvref2\n");
3307 tvWriteUninit(&tvRef2);
// Raw move of the payload: ownership transfers from tvRef to tvRef2.
3310 memcpy(&tvRef2, &tvRef, sizeof(TypedValue));
3311 tvWriteUninit(&tvRef);
3312 // Update result to point to relocated reference. This can be done
3313 // unconditionally here because we maintain the invariant throughout that
3314 // either tvRef is KindOfUninit, or tvRef contains a valid object that
3315 // result points to.
3316 assert(result == &tvRef);
3317 result = &tvRef2;
// Boilerplate locals shared by the member-vector helper entry points
// (get/set helpers). These macros declare the scratch state that
// memberHelperPre fills in. Comments cannot go inside the macros because of
// the line continuations.
3321 #define DECLARE_MEMBERHELPER_ARGS \
3322 unsigned ndiscard; \
3323 TypedValue* base; \
3324 TypedValue tvScratch; \
3325 TypedValue tvLiteral; \
3326 TypedValue tvRef; \
3327 TypedValue tvRef2; \
3328 MemberCode mcode = MEL; \
3329 TypedValue* curMember = 0;
3330 #define DECLARE_SETHELPER_ARGS DECLARE_MEMBERHELPER_ARGS
// The get variant additionally needs a slot for the returned value.
3331 #define DECLARE_GETHELPER_ARGS \
3332 DECLARE_MEMBERHELPER_ARGS \
3333 TypedValue* tvRet;
// Argument-forwarding list matching the memberHelperPre signature.
3335 #define MEMBERHELPERPRE_ARGS \
3336 pc, ndiscard, base, tvScratch, tvLiteral, \
3337 tvRef, tvRef2, mcode, curMember
3339 // The following arguments are outputs:
3340 // pc: bytecode instruction after the vector instruction
3341 // ndiscard: number of stack elements to discard
3342 // base: ultimate result of the vector-get
3343 // tvScratch: temporary result storage
3344 // tvRef: temporary result storage
3345 // tvRef2: temporary result storage
3346 // mcode: output MemberCode for the last member if LeaveLast
3347 // curMember: output last member value one if LeaveLast; but undefined
3348 // if the last mcode == MW
3350 // If saveResult is true, then upon completion of getHelperPre(),
3351 // tvScratch contains a reference to the result (a duplicate of what
3352 // base refers to). getHelperPost<true>(...) then saves the result
3353 // to its final location.
// Thin read-only wrapper over memberHelperPre: setMember/define/unset/reffy
// are all false, mdepth is 0.
3354 template <bool warn,
3355 bool saveResult,
3356 VMExecutionContext::VectorLeaveCode mleave>
3357 inline void OPTBLD_INLINE VMExecutionContext::getHelperPre(
3358 PC& pc,
3359 unsigned& ndiscard,
3360 TypedValue*& base,
3361 TypedValue& tvScratch,
3362 TypedValue& tvLiteral,
3363 TypedValue& tvRef,
3364 TypedValue& tvRef2,
3365 MemberCode& mcode,
3366 TypedValue*& curMember) {
3367 memberHelperPre<false, warn, false, false,
3368 false, 0, mleave, saveResult>(MEMBERHELPERPRE_ARGS);
3371 #define GETHELPERPOST_ARGS ndiscard, tvRet, tvScratch, tvRef, tvRef2
// Epilogue for a vector-get: pops the consumed vector inputs, releases the
// temporary refs, and (if saveResult) publishes tvScratch as the result cell.
3372 template <bool saveResult>
3373 inline void OPTBLD_INLINE VMExecutionContext::getHelperPost(
3374 unsigned ndiscard, TypedValue*& tvRet, TypedValue& tvScratch,
3375 TypedValue& tvRef, TypedValue& tvRef2) {
3376 // Clean up all ndiscard elements on the stack. Actually discard
3377 // only ndiscard - 1, and overwrite the last cell with the result,
3378 // or if ndiscard is zero we actually need to allocate a cell.
3379 for (unsigned depth = 0; depth < ndiscard; ++depth) {
3380 TypedValue* tv = m_stack.indTV(depth);
3381 tvRefcountedDecRef(tv);
3384 if (!ndiscard) {
3385 tvRet = m_stack.allocTV();
3386 } else {
3387 m_stack.ndiscard(ndiscard - 1);
3388 tvRet = m_stack.topTV();
// Release the iteration-temporary references acquired in memberHelperPre.
3390 tvRefcountedDecRef(&tvRef);
3391 tvRefcountedDecRef(&tvRef2);
3393 if (saveResult) {
3394 // If tvRef wasn't just allocated, we've already decref'd it in
3395 // the loop above.
3396 memcpy(tvRet, &tvScratch, sizeof(TypedValue));
3400 #define GETHELPER_ARGS \
3401 pc, ndiscard, tvRet, base, tvScratch, tvLiteral, \
3402 tvRef, tvRef2, mcode, curMember
// Complete vector-get: pre-pass (warn + saveResult, consume the whole
// vector) followed by the post-pass that materializes the result cell.
3403 inline void OPTBLD_INLINE
3404 VMExecutionContext::getHelper(PC& pc,
3405 unsigned& ndiscard,
3406 TypedValue*& tvRet,
3407 TypedValue*& base,
3408 TypedValue& tvScratch,
3409 TypedValue& tvLiteral,
3410 TypedValue& tvRef,
3411 TypedValue& tvRef2,
3412 MemberCode& mcode,
3413 TypedValue*& curMember) {
3414 getHelperPre<true, true, ConsumeAll>(MEMBERHELPERPRE_ARGS);
3415 getHelperPost<true>(GETHELPERPOST_ARGS);
// Element read on a non-array base (arrays are handled elsewhere — see the
// assert). Writes the looked-up element into `dest`, duplicating it when
// Elem returned a pointer to other storage.
3418 void
3419 VMExecutionContext::getElem(TypedValue* base, TypedValue* key,
3420 TypedValue* dest) {
3421 assert(base->m_type != KindOfArray);
3422 VMRegAnchor _;
3423 tvWriteUninit(dest);
// NOTE(review): dest is passed as both scratch and ref slot here — appears
// intentional since the result is copied below; confirm against Elem's
// contract in member_operations.h.
3424 TypedValue* result = Elem<true>(*dest, *dest, base, key);
3425 if (result != dest) {
3426 tvDup(result, dest);
// Core member-vector pre-pass shared by the get and set helpers. Decodes the
// immediate vector from `pc`, resolves the location-code base (local, named,
// global, static prop, $this, or stack cell), then walks the member codes,
// applying Elem/Prop/NewElem operations to compute the final base. Returns
// true iff a set-path error occurred (result left uninit in tvScratch).
// Statement order (pc advancement, depth-- side effects, ratcheting) is
// load-bearing; code kept byte-identical.
3430 template <bool setMember,
3431 bool warn,
3432 bool define,
3433 bool unset,
3434 bool reffy,
3435 unsigned mdepth, // extra args on stack for set (e.g. rhs)
3436 VMExecutionContext::VectorLeaveCode mleave,
3437 bool saveResult>
3438 inline bool OPTBLD_INLINE VMExecutionContext::memberHelperPre(
3439 PC& pc, unsigned& ndiscard, TypedValue*& base,
3440 TypedValue& tvScratch, TypedValue& tvLiteral,
3441 TypedValue& tvRef, TypedValue& tvRef2,
3442 MemberCode& mcode, TypedValue*& curMember) {
3443 // The caller must move pc to the vector immediate before calling
3444 // {get, set}HelperPre.
3445 const ImmVector immVec = ImmVector::createFromStream(pc);
3446 const uint8_t* vec = immVec.vec();
3447 assert(immVec.size() > 0);
3449 // PC needs to be advanced before we do anything, otherwise if we
3450 // raise a notice in the middle of this we could resume at the wrong
3451 // instruction.
3452 pc += immVec.size() + sizeof(int32_t) + sizeof(int32_t);
3454 if (!setMember) {
3455 assert(mdepth == 0);
3456 assert(!define);
3457 assert(!unset);
// `depth` indexes the next vector input on the eval stack, above any
// set-side extra args (mdepth).
3460 ndiscard = immVec.numStackValues();
3461 int depth = mdepth + ndiscard - 1;
3462 const LocationCode lcode = LocationCode(*vec++);
3464 TypedValue* loc = nullptr;
3465 TypedValue dummy;
3466 Class* const ctx = arGetContextClass(getFP());
3468 StringData* name;
3469 TypedValue* fr = nullptr;
3470 TypedValue* cref;
3471 TypedValue* pname;
3472 tvWriteUninit(&tvScratch);
// Resolve the base according to the location code.
3474 switch (lcode) {
3475 case LNL:
3476 loc = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3477 goto lcodeName;
3478 case LNC:
3479 loc = m_stack.indTV(depth--);
3480 goto lcodeName;
// Named (dynamic) local: lookup, or define-on-miss when `define`.
3482 lcodeName:
3483 if (define) {
3484 lookupd_var(m_fp, name, loc, fr);
3485 } else {
3486 lookup_var(m_fp, name, loc, fr);
3488 if (fr == nullptr) {
3489 if (warn) {
3490 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3492 tvWriteNull(&dummy);
3493 loc = &dummy;
3494 } else {
3495 loc = fr;
3497 decRefStr(name);
3498 break;
3500 case LGL:
3501 loc = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3502 goto lcodeGlobal;
3503 case LGC:
3504 loc = m_stack.indTV(depth--);
3505 goto lcodeGlobal;
// Global variable base; mirrors the named-local path above.
3507 lcodeGlobal:
3508 if (define) {
3509 lookupd_gbl(m_fp, name, loc, fr);
3510 } else {
3511 lookup_gbl(m_fp, name, loc, fr);
3513 if (fr == nullptr) {
3514 if (warn) {
3515 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3517 tvWriteNull(&dummy);
3518 loc = &dummy;
3519 } else {
3520 loc = fr;
3522 decRefStr(name);
3523 break;
3525 case LSC:
3526 cref = m_stack.indTV(mdepth);
3527 pname = m_stack.indTV(depth--);
3528 goto lcodeSprop;
3529 case LSL:
3530 cref = m_stack.indTV(mdepth);
3531 pname = frame_local_inner(m_fp, decodeVariableSizeImm(&vec));
3532 goto lcodeSprop;
// Static property base; fatals if not visible+accessible from ctx.
3534 lcodeSprop: {
3535 bool visible, accessible;
3536 assert(cref->m_type == KindOfClass);
3537 const Class* class_ = cref->m_data.pcls;
3538 StringData* name = lookup_name(pname);
3539 loc = class_->getSProp(ctx, name, visible, accessible);
3540 if (!(visible && accessible)) {
3541 raise_error("Invalid static property access: %s::%s",
3542 class_->name()->data(),
3543 name->data());
3545 decRefStr(name);
3546 break;
// Direct (compile-time-known) local base.
3549 case LL: {
3550 int localInd = decodeVariableSizeImm(&vec);
3551 loc = frame_local_inner(m_fp, localInd);
3552 if (warn) {
3553 if (loc->m_type == KindOfUninit) {
3554 raise_notice(Strings::UNDEFINED_VARIABLE,
3555 m_fp->m_func->localVarName(localInd)->data());
3558 break;
3560 case LC:
3561 case LR:
3562 loc = m_stack.indTV(depth--);
3563 break;
// $this as base, staged through tvScratch.
3564 case LH:
3565 assert(m_fp->hasThis());
3566 tvScratch.m_type = KindOfObject;
3567 tvScratch.m_data.pobj = m_fp->getThis();
3568 loc = &tvScratch;
3569 break;
3571 default: not_reached();
3574 base = loc;
3575 tvWriteUninit(&tvLiteral);
3576 tvWriteUninit(&tvRef);
3577 tvWriteUninit(&tvRef2);
3579 // Iterate through the members.
3580 while (vec < pc) {
3581 mcode = MemberCode(*vec++);
// Immediate member keys come from the litstr table, an int64 immediate,
// or a local slot; stack-relative keys pop from `depth`.
3582 if (memberCodeHasImm(mcode)) {
3583 int64_t memberImm = decodeMemberCodeImm(&vec, mcode);
3584 if (memberCodeImmIsString(mcode)) {
3585 tvAsVariant(&tvLiteral) =
3586 m_fp->m_func->unit()->lookupLitstrId(memberImm);
3587 assert(!IS_REFCOUNTED_TYPE(tvLiteral.m_type));
3588 curMember = &tvLiteral;
3589 } else if (mcode == MEI) {
3590 tvAsVariant(&tvLiteral) = memberImm;
3591 curMember = &tvLiteral;
3592 } else {
3593 assert(memberCodeImmIsLoc(mcode));
3594 curMember = frame_local_inner(m_fp, memberImm);
3596 } else {
3597 curMember = (setMember && mcode == MW) ? nullptr : m_stack.indTV(depth--);
// LeaveLast: stop before applying the final member op; the caller
// finishes it (e.g. the actual set/incdec/bind).
3600 if (mleave == LeaveLast) {
3601 if (vec >= pc) {
3602 assert(vec == pc);
3603 break;
// Apply the member operation to advance the base.
3607 TypedValue* result;
3608 switch (mcode) {
3609 case MEL:
3610 case MEC:
3611 case MET:
3612 case MEI:
3613 if (unset) {
3614 result = ElemU(tvScratch, tvRef, base, curMember);
3615 } else if (define) {
3616 result = ElemD<warn,reffy>(tvScratch, tvRef, base, curMember);
3617 } else {
3618 result = Elem<warn>(tvScratch, tvRef, base, curMember);
3620 break;
3621 case MPL:
3622 case MPC:
3623 case MPT:
3624 result = Prop<warn, define, unset>(tvScratch, tvRef, ctx, base,
3625 curMember);
3626 break;
3627 case MW:
3628 if (setMember) {
3629 assert(define);
3630 result = NewElem(tvScratch, tvRef, base);
3631 } else {
3632 raise_error("Cannot use [] for reading");
3633 result = nullptr;
3635 break;
3636 default:
3637 assert(false);
3638 result = nullptr; // Silence compiler warning.
3640 assert(result != nullptr);
// Keep the most recent acquired ref alive exactly one more iteration.
3641 ratchetRefs(result, tvRef, tvRef2);
3642 // Check whether an error occurred (i.e. no result was set).
3643 if (setMember && result == &tvScratch && result->m_type == KindOfUninit) {
3644 return true;
3646 base = result;
// Debug-only sanity: every vector input (except the sprop clsref, which
// sits at mdepth) was consumed.
3649 if (mleave == ConsumeAll) {
3650 assert(vec == pc);
3651 if (debug) {
3652 if (lcode == LSC || lcode == LSL) {
3653 assert(depth == int(mdepth));
3654 } else {
3655 assert(depth == int(mdepth) - 1);
3660 if (saveResult) {
3661 assert(!setMember);
3662 // If requested, save a copy of the result. If base already points to
3663 // tvScratch, no reference counting is necessary, because (with the
3664 // exception of the following block), tvScratch is never populated such
3665 // that it owns a reference that must be accounted for.
3666 if (base != &tvScratch) {
3667 // Acquire a reference to the result via tvDup(); base points to the
3668 // result but does not own a reference.
3669 tvDup(base, &tvScratch);
3673 return false;
3676 // The following arguments are outputs: (TODO put them in struct)
3677 // pc: bytecode instruction after the vector instruction
3678 // ndiscard: number of stack elements to discard
3679 // base: ultimate result of the vector-get
3680 // tvScratch: temporary result storage
3681 // tvRef: temporary result storage
3682 // tvRef2: temporary result storage
3683 // mcode: output MemberCode for the last member if LeaveLast
3684 // curMember: output last member value one if LeaveLast; but undefined
3685 // if the last mcode == MW
// Thin mutating wrapper over memberHelperPre: setMember=true,
// saveResult=false; forwards warn/define/unset/reffy/mdepth/mleave.
3686 template <bool warn,
3687 bool define,
3688 bool unset,
3689 bool reffy,
3690 unsigned mdepth, // extra args on stack for set (e.g. rhs)
3691 VMExecutionContext::VectorLeaveCode mleave>
3692 inline bool OPTBLD_INLINE VMExecutionContext::setHelperPre(
3693 PC& pc, unsigned& ndiscard, TypedValue*& base,
3694 TypedValue& tvScratch, TypedValue& tvLiteral,
3695 TypedValue& tvRef, TypedValue& tvRef2,
3696 MemberCode& mcode, TypedValue*& curMember) {
3697 return memberHelperPre<true, warn, define, unset,
3698 reffy, mdepth, mleave, false>(MEMBERHELPERPRE_ARGS);
3701 #define SETHELPERPOST_ARGS ndiscard, tvRef, tvRef2
// Epilogue for a vector-set: decref and discard the consumed vector inputs
// (preserving the bottom mdepth extra args, sliding the return value down
// when mdepth > 0), then release the temporary refs.
3702 template <unsigned mdepth>
3703 inline void OPTBLD_INLINE VMExecutionContext::setHelperPost(
3704 unsigned ndiscard, TypedValue& tvRef, TypedValue& tvRef2) {
3705 // Clean up the stack. Decref all the elements for the vector, but
3706 // leave the first mdepth (they are not part of the vector data).
3707 for (unsigned depth = mdepth; depth-mdepth < ndiscard; ++depth) {
3708 TypedValue* tv = m_stack.indTV(depth);
3709 tvRefcountedDecRef(tv);
3712 // NOTE: currently the only instructions using this that have return
3713 // values on the stack also have more inputs than the -vector, so
3714 // mdepth > 0. They also always return the original top value of
3715 // the stack.
3716 if (mdepth > 0) {
3717 assert(mdepth == 1 &&
3718 "We don't really support mdepth > 1 in setHelperPost");
3720 if (ndiscard > 0) {
// Move the return value down over the discarded vector slots.
3721 TypedValue* retSrc = m_stack.topTV();
3722 TypedValue* dest = m_stack.indTV(ndiscard + mdepth - 1);
3723 assert(dest != retSrc);
3724 memcpy(dest, retSrc, sizeof *dest);
3728 m_stack.ndiscard(ndiscard);
3729 tvRefcountedDecRef(&tvRef);
3730 tvRefcountedDecRef(&tvRef2);
// Handler for the reserved low opcode: executing it is always a VM bug.
3733 inline void OPTBLD_INLINE VMExecutionContext::iopLowInvalid(PC& pc) {
3734 fprintf(stderr, "invalid bytecode executed\n");
3735 abort();
// Nop: advance pc only.
3738 inline void OPTBLD_INLINE VMExecutionContext::iopNop(PC& pc) {
3739 NEXT();
// PopC: discard the cell on top of the stack.
3742 inline void OPTBLD_INLINE VMExecutionContext::iopPopC(PC& pc) {
3743 NEXT();
3744 m_stack.popC();
// PopV: discard the ref on top of the stack.
3747 inline void OPTBLD_INLINE VMExecutionContext::iopPopV(PC& pc) {
3748 NEXT();
3749 m_stack.popV();
3752 inline void OPTBLD_INLINE VMExecutionContext::iopPopR(PC& pc) {
3753 NEXT();
3754 if (m_stack.topTV()->m_type != KindOfRef) {
3755 m_stack.popC();
3756 } else {
3757 m_stack.popV();
// Dup: duplicate the top stack cell.
3761 inline void OPTBLD_INLINE VMExecutionContext::iopDup(PC& pc) {
3762 NEXT();
3763 m_stack.dup();
// Box: box the top stack cell into a ref.
3766 inline void OPTBLD_INLINE VMExecutionContext::iopBox(PC& pc) {
3767 NEXT();
3768 m_stack.box();
// Unbox: replace the ref on top of the stack with its inner cell.
3771 inline void OPTBLD_INLINE VMExecutionContext::iopUnbox(PC& pc) {
3772 NEXT();
3773 m_stack.unbox();
// BoxR: box the top R-value only if it is not already a ref.
3776 inline void OPTBLD_INLINE VMExecutionContext::iopBoxR(PC& pc) {
3777 NEXT();
3778 TypedValue* tv = m_stack.topTV();
3779 if (tv->m_type != KindOfRef) {
3780 tvBox(tv);
3784 inline void OPTBLD_INLINE VMExecutionContext::iopUnboxR(PC& pc) {
3785 NEXT();
3786 if (m_stack.topTV()->m_type == KindOfRef) {
3787 m_stack.unbox();
// The handlers below push literal/immediate values onto the eval stack.
3791 inline void OPTBLD_INLINE VMExecutionContext::iopNull(PC& pc) {
3792 NEXT();
3793 m_stack.pushNull();
3796 inline void OPTBLD_INLINE VMExecutionContext::iopNullUninit(PC& pc) {
3797 NEXT();
3798 m_stack.pushNullUninit();
3801 inline void OPTBLD_INLINE VMExecutionContext::iopTrue(PC& pc) {
3802 NEXT();
3803 m_stack.pushTrue();
3806 inline void OPTBLD_INLINE VMExecutionContext::iopFalse(PC& pc) {
3807 NEXT();
3808 m_stack.pushFalse();
// File: push the current unit's file path (a static string).
3811 inline void OPTBLD_INLINE VMExecutionContext::iopFile(PC& pc) {
3812 NEXT();
3813 const StringData* s = m_fp->m_func->unit()->filepath();
3814 m_stack.pushStaticString(const_cast<StringData*>(s));
// Dir: push the current unit's directory path.
3817 inline void OPTBLD_INLINE VMExecutionContext::iopDir(PC& pc) {
3818 NEXT();
3819 const StringData* s = m_fp->m_func->unit()->dirpath();
3820 m_stack.pushStaticString(const_cast<StringData*>(s));
// Int/Double/String/Array: decode the immediate and push it.
3823 inline void OPTBLD_INLINE VMExecutionContext::iopInt(PC& pc) {
3824 NEXT();
3825 DECODE(int64_t, i);
3826 m_stack.pushInt(i);
3829 inline void OPTBLD_INLINE VMExecutionContext::iopDouble(PC& pc) {
3830 NEXT();
3831 DECODE(double, d);
3832 m_stack.pushDouble(d);
3835 inline void OPTBLD_INLINE VMExecutionContext::iopString(PC& pc) {
3836 NEXT();
3837 DECODE_LITSTR(s);
3838 m_stack.pushStaticString(s);
3841 inline void OPTBLD_INLINE VMExecutionContext::iopArray(PC& pc) {
3842 NEXT();
3843 DECODE(Id, id);
3844 ArrayData* a = m_fp->m_func->unit()->lookupArrayId(id);
3845 m_stack.pushStaticArray(a);
// NewArray: push a fresh, empty array.
3848 inline void OPTBLD_INLINE VMExecutionContext::iopNewArray(PC& pc) {
3849 NEXT();
3850 // Clever sizing avoids extra work in HphpArray construction.
3851 auto arr = ArrayData::Make(size_t(3U) << (HphpArray::MinLgTableSize-2));
3852 m_stack.pushArray(arr);
// NewTuple: build a packed array from the top n stack cells.
3855 inline void OPTBLD_INLINE VMExecutionContext::iopNewTuple(PC& pc) {
3856 NEXT();
3857 DECODE_IVA(n);
3858 // This constructor moves values, no inc/decref is necessary.
3859 HphpArray* arr = ArrayData::Make(n, m_stack.topC());
3860 m_stack.ndiscard(n);
3861 m_stack.pushArray(arr);
// AddElemC: $3[$2] = $1 where $3 must be an array; int keys take the
// integer fast path. Pops the key and value, leaving the array.
3864 inline void OPTBLD_INLINE VMExecutionContext::iopAddElemC(PC& pc) {
3865 NEXT();
3866 Cell* c1 = m_stack.topC();
3867 Cell* c2 = m_stack.indC(1);
3868 Cell* c3 = m_stack.indC(2);
3869 if (c3->m_type != KindOfArray) {
3870 raise_error("AddElemC: $3 must be an array");
3872 if (c2->m_type == KindOfInt64) {
3873 tvCellAsVariant(c3).asArrRef().set(c2->m_data.num, tvAsCVarRef(c1));
3874 } else {
3875 tvCellAsVariant(c3).asArrRef().set(tvAsCVarRef(c2), tvAsCVarRef(c1));
3877 m_stack.popC();
3878 m_stack.popC();
// AddElemV: like AddElemC but the value is bound by reference.
3881 inline void OPTBLD_INLINE VMExecutionContext::iopAddElemV(PC& pc) {
3882 NEXT();
3883 Var* v1 = m_stack.topV();
3884 Cell* c2 = m_stack.indC(1);
3885 Cell* c3 = m_stack.indC(2);
3886 if (c3->m_type != KindOfArray) {
3887 raise_error("AddElemV: $3 must be an array");
3889 if (c2->m_type == KindOfInt64) {
3890 tvCellAsVariant(c3).asArrRef().set(c2->m_data.num, ref(tvAsCVarRef(v1)));
3891 } else {
3892 tvCellAsVariant(c3).asArrRef().set(tvAsCVarRef(c2), ref(tvAsCVarRef(v1)));
3894 m_stack.popV();
3895 m_stack.popC();
// AddNewElemC: $2[] = $1; pops the value, leaving the array.
3898 inline void OPTBLD_INLINE VMExecutionContext::iopAddNewElemC(PC& pc) {
3899 NEXT();
3900 Cell* c1 = m_stack.topC();
3901 Cell* c2 = m_stack.indC(1);
3902 if (c2->m_type != KindOfArray) {
3903 raise_error("AddNewElemC: $2 must be an array");
3905 tvCellAsVariant(c2).asArrRef().append(tvAsCVarRef(c1));
3906 m_stack.popC();
// AddNewElemV: like AddNewElemC but appends by reference.
3909 inline void OPTBLD_INLINE VMExecutionContext::iopAddNewElemV(PC& pc) {
3910 NEXT();
3911 Var* v1 = m_stack.topV();
3912 Cell* c2 = m_stack.indC(1);
3913 if (c2->m_type != KindOfArray) {
3914 raise_error("AddNewElemV: $2 must be an array");
3916 tvCellAsVariant(c2).asArrRef().append(ref(tvAsCVarRef(v1)));
3917 m_stack.popV();
// NewCol: allocate an empty collection of the immediate-selected type,
// pre-reserving nElms slots, and push it.
3920 inline void OPTBLD_INLINE VMExecutionContext::iopNewCol(PC& pc) {
3921 NEXT();
3922 DECODE_IVA(cType);
3923 DECODE_IVA(nElms);
3924 ObjectData* obj;
3925 switch (cType) {
3926 case Collection::VectorType: obj = NEWOBJ(c_Vector)(); break;
3927 case Collection::MapType: obj = NEWOBJ(c_Map)(); break;
3928 case Collection::StableMapType: obj = NEWOBJ(c_StableMap)(); break;
3929 case Collection::SetType: obj = NEWOBJ(c_Set)(); break;
3930 case Collection::PairType: obj = NEWOBJ(c_Pair)(); break;
3931 default:
3932 obj = nullptr;
3933 raise_error("NewCol: Invalid collection type");
3934 break;
3936 // Reserve enough room for nElms elements in advance
3937 if (nElms) {
3938 collectionReserve(obj, nElms);
3940 m_stack.pushObject(obj);
// ColAddNewElemC: append $1 to the collection $2; pops the value.
3943 inline void OPTBLD_INLINE VMExecutionContext::iopColAddNewElemC(PC& pc) {
3944 NEXT();
3945 Cell* c1 = m_stack.topC();
3946 Cell* c2 = m_stack.indC(1);
3947 if (c2->m_type == KindOfObject && c2->m_data.pobj->isCollection()) {
3948 collectionAppend(c2->m_data.pobj, c1);
3949 } else {
3950 raise_error("ColAddNewElemC: $2 must be a collection");
3952 m_stack.popC();
// ColAddElemC: $3[$2] = $1 on a collection; pops key and value.
3955 inline void OPTBLD_INLINE VMExecutionContext::iopColAddElemC(PC& pc) {
3956 NEXT();
3957 Cell* c1 = m_stack.topC();
3958 Cell* c2 = m_stack.indC(1);
3959 Cell* c3 = m_stack.indC(2);
3960 if (c3->m_type == KindOfObject && c3->m_data.pobj->isCollection()) {
3961 collectionSet(c3->m_data.pobj, c2, c1);
3962 } else {
3963 raise_error("ColAddElemC: $3 must be a collection");
3965 m_stack.popC();
3966 m_stack.popC();
// Cns: push the value of a global constant; on miss, raise a notice and
// push the constant's own name as a string (PHP semantics).
3969 inline void OPTBLD_INLINE VMExecutionContext::iopCns(PC& pc) {
3970 NEXT();
3971 DECODE_LITSTR(s);
3972 TypedValue* cns = Unit::loadCns(s);
3973 if (cns == nullptr) {
3974 raise_notice(Strings::UNDEFINED_CONSTANT, s->data(), s->data());
3975 m_stack.pushStaticString(s);
3976 return;
3978 Cell* c1 = m_stack.allocC();
3979 tvReadCell(cns, c1);
// CnsE: like Cns, but an undefined constant is a fatal error.
3982 inline void OPTBLD_INLINE VMExecutionContext::iopCnsE(PC& pc) {
3983 NEXT();
3984 DECODE_LITSTR(s);
3985 TypedValue* cns = Unit::loadCns(s);
3986 if (cns == nullptr) {
3987 raise_error("Undefined constant '%s'", s->data());
3989 Cell* c1 = m_stack.allocC();
3990 tvReadCell(cns, c1);
// CnsU: look up `name`, falling back to `fallback`; on double miss, raise a
// notice and push the fallback's name.
3993 inline void OPTBLD_INLINE VMExecutionContext::iopCnsU(PC& pc) {
3994 NEXT();
3995 DECODE_LITSTR(name);
3996 DECODE_LITSTR(fallback);
3997 TypedValue* cns = Unit::loadCns(name);
3998 if (cns == nullptr) {
3999 cns = Unit::loadCns(fallback);
4000 if (cns == nullptr) {
4001 raise_notice(
4002 Strings::UNDEFINED_CONSTANT,
4003 fallback->data(),
4004 fallback->data()
4006 m_stack.pushStaticString(fallback);
4007 return;
4010 Cell* c1 = m_stack.allocC();
4011 tvReadCell(cns, c1);
// DefCns: define constant `s` with the top-of-stack value; replaces the top
// of stack with the boolean result of Unit::defCns.
4014 inline void OPTBLD_INLINE VMExecutionContext::iopDefCns(PC& pc) {
4015 NEXT();
4016 DECODE_LITSTR(s);
4017 TypedValue* tv = m_stack.topTV();
4018 tvAsVariant(tv) = Unit::defCns(s, tv);
// ClsCns: replace the class on top of the stack with one of its constants;
// fatals if the constant is missing.
4021 inline void OPTBLD_INLINE VMExecutionContext::iopClsCns(PC& pc) {
4022 NEXT();
4023 DECODE_LITSTR(clsCnsName);
4024 TypedValue* tv = m_stack.topTV();
4025 assert(tv->m_type == KindOfClass);
4026 Class* class_ = tv->m_data.pcls;
4027 assert(class_ != nullptr);
4028 TypedValue* clsCns = class_->clsCnsGet(clsCnsName);
4029 if (clsCns == nullptr) {
4030 raise_error("Couldn't find constant %s::%s",
4031 class_->name()->data(), clsCnsName->data());
// Overwrite the class cell in place with the constant's value.
4033 tvReadCell(clsCns, tv);
// ClsCnsD: push a class constant where the class is named directly by an
// immediate (no class cell on the stack).
4036 inline void OPTBLD_INLINE VMExecutionContext::iopClsCnsD(PC& pc) {
4037 NEXT();
4038 DECODE_LITSTR(clsCnsName);
4039 DECODE(Id, classId);
4040 const NamedEntityPair& classNamedEntity =
4041 m_fp->m_func->unit()->lookupNamedEntityPairId(classId);
4043 TypedValue* clsCns = lookupClsCns(classNamedEntity.second,
4044 classNamedEntity.first, clsCnsName);
4045 assert(clsCns != nullptr);
4046 Cell* c1 = m_stack.allocC();
4047 tvReadCell(clsCns, c1);
// Concat: $2 . $1, with a fast path when both operands are already strings;
// otherwise both are converted via toString. Result replaces $2; $1 popped.
4050 inline void OPTBLD_INLINE VMExecutionContext::iopConcat(PC& pc) {
4051 NEXT();
4052 Cell* c1 = m_stack.topC();
4053 Cell* c2 = m_stack.indC(1);
4054 if (IS_STRING_TYPE(c1->m_type) && IS_STRING_TYPE(c2->m_type)) {
4055 tvCellAsVariant(c2) = concat(tvCellAsVariant(c2), tvCellAsCVarRef(c1));
4056 } else {
4057 tvCellAsVariant(c2) = concat(tvCellAsVariant(c2).toString(),
4058 tvCellAsCVarRef(c1).toString());
4060 assert(c2->m_data.pstr->getCount() > 0);
4061 m_stack.popC();
// Binary arithmetic template: int/int fast path, optional double/double
// path (MATHOP_DOUBLE), optional divide-by-zero guard (MATHOP_DIVCHECK),
// and a generic Variant fallback (VOP). The two auxiliary macros are
// redefined around each operator group below to enable/disable those paths.
// Comments cannot go inside the macros because of the line continuations.
4064 #define MATHOP(OP, VOP) do { \
4065 NEXT(); \
4066 Cell* c1 = m_stack.topC(); \
4067 Cell* c2 = m_stack.indC(1); \
4068 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4069 int64_t a = c2->m_data.num; \
4070 int64_t b = c1->m_data.num; \
4071 MATHOP_DIVCHECK(0) \
4072 c2->m_data.num = a OP b; \
4073 m_stack.popX(); \
4075 MATHOP_DOUBLE(OP) \
4076 else { \
4077 tvCellAsVariant(c2) = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
4078 m_stack.popC(); \
4080 } while (0)
// double/double fast path used by +, -, *, / below.
4081 #define MATHOP_DOUBLE(OP) \
4082 else if (c2->m_type == KindOfDouble \
4083 && c1->m_type == KindOfDouble) { \
4084 double a = c2->m_data.dbl; \
4085 double b = c1->m_data.dbl; \
4086 MATHOP_DIVCHECK(0.0) \
4087 c2->m_data.dbl = a OP b; \
4088 m_stack.popX(); \
// No div-by-zero check for +, -, *.
4090 #define MATHOP_DIVCHECK(x)
// Add/Sub/Mul: plain MATHOP instantiations (no divide check).
4091 inline void OPTBLD_INLINE VMExecutionContext::iopAdd(PC& pc) {
4092 MATHOP(+, plus);
4095 inline void OPTBLD_INLINE VMExecutionContext::iopSub(PC& pc) {
4096 MATHOP(-, minus);
4099 inline void OPTBLD_INLINE VMExecutionContext::iopMul(PC& pc) {
4100 MATHOP(*, multiply);
4102 #undef MATHOP_DIVCHECK
// Division-by-zero guard: warn and produce boolean false (PHP semantics).
4104 #define MATHOP_DIVCHECK(x) \
4105 if (b == x) { \
4106 raise_warning(Strings::DIVISION_BY_ZERO); \
4107 c2->m_data.num = 0; \
4108 c2->m_type = KindOfBoolean; \
4109 } else
// Div: open-coded rather than MATHOP so the int fast path only triggers
// when the division is exact (no remainder).
4110 inline void OPTBLD_INLINE VMExecutionContext::iopDiv(PC& pc) {
4111 NEXT();
4112 Cell* c1 = m_stack.topC(); // denominator
4113 Cell* c2 = m_stack.indC(1); // numerator
4114 // Special handling for evenly divisible ints
4115 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64
4116 && c1->m_data.num != 0 && c2->m_data.num % c1->m_data.num == 0) {
4117 int64_t b = c1->m_data.num;
4118 MATHOP_DIVCHECK(0)
4119 c2->m_data.num /= b;
4120 m_stack.popX();
4122 MATHOP_DOUBLE(/)
4123 else {
4124 tvCellAsVariant(c2) = divide(tvCellAsVariant(c2), tvCellAsCVarRef(c1));
4125 m_stack.popC();
4128 #undef MATHOP_DOUBLE
// Mod: int-only fast path (no double path), with the div-by-zero guard.
4130 #define MATHOP_DOUBLE(OP)
4131 inline void OPTBLD_INLINE VMExecutionContext::iopMod(PC& pc) {
4132 MATHOP(%, modulo);
4134 #undef MATHOP_DOUBLE
4135 #undef MATHOP_DIVCHECK
// Binary boolean op template: coerce both operands to bool, store the
// result in $2, pop $1.
4137 #define LOGICOP(OP) do { \
4138 NEXT(); \
4139 Cell* c1 = m_stack.topC(); \
4140 Cell* c2 = m_stack.indC(1); \
4142 tvCellAsVariant(c2) = \
4143 (bool)(bool(tvCellAsVariant(c2)) OP bool(tvCellAsVariant(c1))); \
4145 m_stack.popC(); \
4146 } while (0)
4148 inline void OPTBLD_INLINE VMExecutionContext::iopXor(PC& pc) {
4149 LOGICOP(^);
4151 #undef LOGICOP
// Not: boolean negation of the top cell, in place.
4153 inline void OPTBLD_INLINE VMExecutionContext::iopNot(PC& pc) {
4154 NEXT();
4155 Cell* c1 = m_stack.topC();
4156 tvCellAsVariant(c1) = !bool(tvCellAsVariant(c1));
// Comparison template: int/int fast path uses the raw C operator OP;
// everything else goes through the Variant comparison VOP. Result is a
// boolean stored in $2; $1 popped.
4159 #define CMPOP(OP, VOP) do { \
4160 NEXT(); \
4161 Cell* c1 = m_stack.topC(); \
4162 Cell* c2 = m_stack.indC(1); \
4163 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4164 int64_t a = c2->m_data.num; \
4165 int64_t b = c1->m_data.num; \
4166 c2->m_data.num = (a OP b); \
4167 c2->m_type = KindOfBoolean; \
4168 m_stack.popX(); \
4169 } else { \
4170 int64_t result = VOP(tvCellAsVariant(c2), tvCellAsCVarRef(c1)); \
4171 tvRefcountedDecRefCell(c2); \
4172 c2->m_data.num = result; \
4173 c2->m_type = KindOfBoolean; \
4174 m_stack.popC(); \
4176 } while (0)
// === / !== (identity) — for two ints, == is identical to ===.
4177 inline void OPTBLD_INLINE VMExecutionContext::iopSame(PC& pc) {
4178 CMPOP(==, same);
4181 inline void OPTBLD_INLINE VMExecutionContext::iopNSame(PC& pc) {
4182 CMPOP(!=, !same);
// == / != / < / <= / > / >= with PHP loose-comparison fallbacks.
4185 inline void OPTBLD_INLINE VMExecutionContext::iopEq(PC& pc) {
4186 CMPOP(==, equal);
4189 inline void OPTBLD_INLINE VMExecutionContext::iopNeq(PC& pc) {
4190 CMPOP(!=, !equal);
4193 inline void OPTBLD_INLINE VMExecutionContext::iopLt(PC& pc) {
4194 CMPOP(<, less);
4197 inline void OPTBLD_INLINE VMExecutionContext::iopLte(PC& pc) {
4198 CMPOP(<=, less_or_equal);
4201 inline void OPTBLD_INLINE VMExecutionContext::iopGt(PC& pc) {
4202 CMPOP(>, more);
4205 inline void OPTBLD_INLINE VMExecutionContext::iopGte(PC& pc) {
4206 CMPOP(>=, more_or_equal);
4208 #undef CMPOP
// Bitwise ops reuse MATHOP with the double path and div check disabled.
4210 #define MATHOP_DOUBLE(OP)
4211 #define MATHOP_DIVCHECK(x)
4212 inline void OPTBLD_INLINE VMExecutionContext::iopBitAnd(PC& pc) {
4213 MATHOP(&, bitwise_and);
4216 inline void OPTBLD_INLINE VMExecutionContext::iopBitOr(PC& pc) {
4217 MATHOP(|, bitwise_or);
4220 inline void OPTBLD_INLINE VMExecutionContext::iopBitXor(PC& pc) {
4221 MATHOP(^, bitwise_xor);
4223 #undef MATHOP
4224 #undef MATHOP_DOUBLE
4225 #undef MATHOP_DIVCHECK
4227 inline void OPTBLD_INLINE VMExecutionContext::iopBitNot(PC& pc) {
4228 NEXT();
4229 Cell* c1 = m_stack.topC();
4230 if (LIKELY(c1->m_type == KindOfInt64)) {
4231 c1->m_data.num = ~c1->m_data.num;
4232 } else if (c1->m_type == KindOfDouble) {
4233 c1->m_type = KindOfInt64;
4234 c1->m_data.num = ~int64_t(c1->m_data.dbl);
4235 } else if (IS_STRING_TYPE(c1->m_type)) {
4236 tvCellAsVariant(c1) = ~tvCellAsVariant(c1);
4237 } else {
4238 raise_error("Unsupported operand type for ~");
4242 #define SHIFTOP(OP) do { \
4243 NEXT(); \
4244 Cell* c1 = m_stack.topC(); \
4245 Cell* c2 = m_stack.indC(1); \
4246 if (c2->m_type == KindOfInt64 && c1->m_type == KindOfInt64) { \
4247 int64_t a = c2->m_data.num; \
4248 int64_t b = c1->m_data.num; \
4249 c2->m_data.num = a OP b; \
4250 m_stack.popX(); \
4251 } else { \
4252 tvCellAsVariant(c2) = tvCellAsVariant(c2).toInt64() OP \
4253 tvCellAsCVarRef(c1).toInt64(); \
4254 m_stack.popC(); \
4256 } while (0)
4257 inline void OPTBLD_INLINE VMExecutionContext::iopShl(PC& pc) {
4258 SHIFTOP(<<);
4261 inline void OPTBLD_INLINE VMExecutionContext::iopShr(PC& pc) {
4262 SHIFTOP(>>);
4264 #undef SHIFTOP
4266 inline void OPTBLD_INLINE VMExecutionContext::iopCastBool(PC& pc) {
4267 NEXT();
4268 Cell* c1 = m_stack.topC();
4269 tvCastToBooleanInPlace(c1);
4272 inline void OPTBLD_INLINE VMExecutionContext::iopCastInt(PC& pc) {
4273 NEXT();
4274 Cell* c1 = m_stack.topC();
4275 tvCastToInt64InPlace(c1);
4278 inline void OPTBLD_INLINE VMExecutionContext::iopCastDouble(PC& pc) {
4279 NEXT();
4280 Cell* c1 = m_stack.topC();
4281 tvCastToDoubleInPlace(c1);
4284 inline void OPTBLD_INLINE VMExecutionContext::iopCastString(PC& pc) {
4285 NEXT();
4286 Cell* c1 = m_stack.topC();
4287 tvCastToStringInPlace(c1);
4290 inline void OPTBLD_INLINE VMExecutionContext::iopCastArray(PC& pc) {
4291 NEXT();
4292 Cell* c1 = m_stack.topC();
4293 tvCastToArrayInPlace(c1);
4296 inline void OPTBLD_INLINE VMExecutionContext::iopCastObject(PC& pc) {
4297 NEXT();
4298 Cell* c1 = m_stack.topC();
4299 tvCastToObjectInPlace(c1);
4302 inline bool OPTBLD_INLINE VMExecutionContext::cellInstanceOf(
4303 TypedValue* tv, const NamedEntity* ne) {
4304 assert(tv->m_type != KindOfRef);
4305 if (tv->m_type == KindOfObject) {
4306 Class* cls = Unit::lookupClass(ne);
4307 if (cls) return tv->m_data.pobj->instanceof(cls);
4308 } else if (tv->m_type == KindOfArray) {
4309 Class* cls = Unit::lookupClass(ne);
4310 if (cls && interface_supports_array(cls->name())) {
4311 return true;
4314 return false;
4317 inline void OPTBLD_INLINE VMExecutionContext::iopInstanceOf(PC& pc) {
4318 NEXT();
4319 Cell* c1 = m_stack.topC(); // c2 instanceof c1
4320 Cell* c2 = m_stack.indC(1);
4321 bool r = false;
4322 if (IS_STRING_TYPE(c1->m_type)) {
4323 const NamedEntity* rhs = Unit::GetNamedEntity(c1->m_data.pstr);
4324 r = cellInstanceOf(c2, rhs);
4325 } else if (c1->m_type == KindOfObject) {
4326 if (c2->m_type == KindOfObject) {
4327 ObjectData* lhs = c2->m_data.pobj;
4328 ObjectData* rhs = c1->m_data.pobj;
4329 r = lhs->instanceof(rhs->getVMClass());
4331 } else {
4332 raise_error("Class name must be a valid object or a string");
4334 m_stack.popC();
4335 tvRefcountedDecRefCell(c2);
4336 c2->m_data.num = r;
4337 c2->m_type = KindOfBoolean;
4340 inline void OPTBLD_INLINE VMExecutionContext::iopInstanceOfD(PC& pc) {
4341 NEXT();
4342 DECODE(Id, id);
4343 if (shouldProfile()) {
4344 Class::profileInstanceOf(m_fp->m_func->unit()->lookupLitstrId(id));
4346 const NamedEntity* ne = m_fp->m_func->unit()->lookupNamedEntityId(id);
4347 Cell* c1 = m_stack.topC();
4348 bool r = cellInstanceOf(c1, ne);
4349 tvRefcountedDecRefCell(c1);
4350 c1->m_data.num = r;
4351 c1->m_type = KindOfBoolean;
4354 inline void OPTBLD_INLINE VMExecutionContext::iopPrint(PC& pc) {
4355 NEXT();
4356 Cell* c1 = m_stack.topC();
4357 print(tvCellAsVariant(c1).toString());
4358 tvRefcountedDecRefCell(c1);
4359 c1->m_type = KindOfInt64;
4360 c1->m_data.num = 1;
4363 inline void OPTBLD_INLINE VMExecutionContext::iopClone(PC& pc) {
4364 NEXT();
4365 TypedValue* tv = m_stack.topTV();
4366 if (tv->m_type != KindOfObject) {
4367 raise_error("clone called on non-object");
4369 ObjectData* obj = tv->m_data.pobj;
4370 const Class* class_ UNUSED = obj->getVMClass();
4371 ObjectData* newobj = obj->clone();
4372 m_stack.popTV();
4373 m_stack.pushNull();
4374 tv->m_type = KindOfObject;
4375 tv->m_data.pobj = newobj;
4378 inline int OPTBLD_INLINE
4379 VMExecutionContext::handleUnwind(UnwindStatus unwindType) {
4380 int longJumpType;
4381 if (unwindType == UnwindPropagate) {
4382 longJumpType = EXCEPTION_PROPAGATE;
4383 if (m_nestedVMs.empty()) {
4384 m_fp = nullptr;
4385 m_pc = nullptr;
4387 } else {
4388 assert(unwindType == UnwindResumeVM);
4389 longJumpType = EXCEPTION_RESUMEVM;
4391 return longJumpType;
4394 inline void OPTBLD_INLINE VMExecutionContext::iopExit(PC& pc) {
4395 NEXT();
4396 int exitCode = 0;
4397 Cell* c1 = m_stack.topC();
4398 if (c1->m_type == KindOfInt64) {
4399 exitCode = c1->m_data.num;
4400 } else {
4401 print(tvCellAsVariant(c1).toString());
4403 m_stack.popC();
4404 throw ExitException(exitCode);
4407 inline void OPTBLD_INLINE VMExecutionContext::iopFatal(PC& pc) {
4408 NEXT();
4409 TypedValue* top = m_stack.topTV();
4410 std::string msg;
4411 DECODE_IVA(skipFrame);
4412 if (IS_STRING_TYPE(top->m_type)) {
4413 msg = top->m_data.pstr->data();
4414 } else {
4415 msg = "Fatal error message not a string";
4417 m_stack.popTV();
4418 if (skipFrame) {
4419 raise_error_without_first_frame(msg);
4420 } else {
4421 raise_error(msg);
4425 #define JMP_SURPRISE_CHECK() \
4426 if (offset < 0 && UNLIKELY(Transl::TargetCache::loadConditionFlags())) { \
4427 SYNC(); \
4428 EventHook::CheckSurprise(); \
4431 inline void OPTBLD_INLINE VMExecutionContext::iopJmp(PC& pc) {
4432 NEXT();
4433 DECODE_JMP(Offset, offset);
4434 JMP_SURPRISE_CHECK();
4435 pc += offset - 1;
4438 #define JMPOP(OP, VOP) do { \
4439 Cell* c1 = m_stack.topC(); \
4440 if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) { \
4441 int64_t n = c1->m_data.num; \
4442 if (n OP 0) { \
4443 NEXT(); \
4444 DECODE_JMP(Offset, offset); \
4445 JMP_SURPRISE_CHECK(); \
4446 pc += offset - 1; \
4447 m_stack.popX(); \
4448 } else { \
4449 pc += 1 + sizeof(Offset); \
4450 m_stack.popX(); \
4452 } else { \
4453 if (VOP(tvCellAsCVarRef(c1))) { \
4454 NEXT(); \
4455 DECODE_JMP(Offset, offset); \
4456 JMP_SURPRISE_CHECK(); \
4457 pc += offset - 1; \
4458 m_stack.popC(); \
4459 } else { \
4460 pc += 1 + sizeof(Offset); \
4461 m_stack.popC(); \
4464 } while (0)
4465 inline void OPTBLD_INLINE VMExecutionContext::iopJmpZ(PC& pc) {
4466 JMPOP(==, !bool);
4469 inline void OPTBLD_INLINE VMExecutionContext::iopJmpNZ(PC& pc) {
4470 JMPOP(!=, bool);
4472 #undef JMPOP
4473 #undef JMP_SURPRISE_CHECK
// How a Switch operand should be matched after int conversion is attempted.
enum SwitchMatch {
  MATCH_NORMAL,  // value was converted to an int: match normally
  MATCH_NONZERO, // can't be converted to an int: match first nonzero case
  MATCH_DEFAULT, // can't be converted to an int: match default case
};

// If d has an exact int64 representation, store it in `out` and match
// normally; otherwise the switch falls through to its default case.
static SwitchMatch doubleCheck(double d, int64_t& out) {
  if (int64_t(d) != d) {
    return MATCH_DEFAULT;
  }
  out = d;
  return MATCH_NORMAL;
}
4490 inline void OPTBLD_INLINE VMExecutionContext::iopSwitch(PC& pc) {
4491 PC origPC = pc;
4492 NEXT();
4493 DECODE(int32_t, veclen);
4494 assert(veclen > 0);
4495 Offset* jmptab = (Offset*)pc;
4496 pc += veclen * sizeof(*jmptab);
4497 DECODE(int64_t, base);
4498 DECODE_IVA(bounded);
4500 TypedValue* val = m_stack.topTV();
4501 if (!bounded) {
4502 assert(val->m_type == KindOfInt64);
4503 // Continuation switch: no bounds checking needed
4504 int64_t label = val->m_data.num;
4505 m_stack.popX();
4506 assert(label >= 0 && label < veclen);
4507 pc = origPC + jmptab[label];
4508 } else {
4509 // Generic integer switch
4510 int64_t intval;
4511 SwitchMatch match = MATCH_NORMAL;
4513 switch (val->m_type) {
4514 case KindOfUninit:
4515 case KindOfNull:
4516 intval = 0;
4517 break;
4519 case KindOfBoolean:
4520 // bool(true) is equal to any non-zero int, bool(false) == 0
4521 if (val->m_data.num) {
4522 match = MATCH_NONZERO;
4523 } else {
4524 intval = 0;
4526 break;
4528 case KindOfInt64:
4529 intval = val->m_data.num;
4530 break;
4532 case KindOfDouble:
4533 match = doubleCheck(val->m_data.dbl, intval);
4534 break;
4536 case KindOfStaticString:
4537 case KindOfString: {
4538 double dval = 0.0;
4539 DataType t = val->m_data.pstr->isNumericWithVal(intval, dval, 1);
4540 switch (t) {
4541 case KindOfNull:
4542 intval = 0;
4543 break;
4545 case KindOfDouble:
4546 match = doubleCheck(dval, intval);
4547 break;
4549 case KindOfInt64:
4550 // do nothing
4551 break;
4553 default:
4554 not_reached();
4556 tvRefcountedDecRef(val);
4557 break;
4560 case KindOfArray:
4561 match = MATCH_DEFAULT;
4562 tvDecRef(val);
4563 break;
4565 case KindOfObject:
4566 intval = val->m_data.pobj->o_toInt64();
4567 tvDecRef(val);
4568 break;
4570 default:
4571 not_reached();
4573 m_stack.discard();
4575 if (match != MATCH_NORMAL ||
4576 intval < base || intval >= (base + veclen - 2)) {
4577 switch (match) {
4578 case MATCH_NORMAL:
4579 case MATCH_DEFAULT:
4580 pc = origPC + jmptab[veclen - 1];
4581 break;
4583 case MATCH_NONZERO:
4584 pc = origPC + jmptab[veclen - 2];
4585 break;
4587 } else {
4588 pc = origPC + jmptab[intval - base];
4593 inline void OPTBLD_INLINE VMExecutionContext::iopSSwitch(PC& pc) {
4594 PC origPC = pc;
4595 NEXT();
4596 DECODE(int32_t, veclen);
4597 assert(veclen > 1);
4598 unsigned cases = veclen - 1; // the last vector item is the default case
4599 StrVecItem* jmptab = (StrVecItem*)pc;
4600 pc += veclen * sizeof(*jmptab);
4602 TypedValue* val = m_stack.topTV();
4603 Unit* u = m_fp->m_func->unit();
4604 unsigned i;
4605 for (i = 0; i < cases; ++i) {
4606 auto& item = jmptab[i];
4607 const StringData* str = u->lookupLitstrId(item.str);
4608 if (tvAsVariant(val).equal(str)) {
4609 pc = origPC + item.dest;
4610 break;
4613 if (i == cases) {
4614 // default case
4615 pc = origPC + jmptab[veclen-1].dest;
4617 m_stack.popC();
4620 inline void OPTBLD_INLINE VMExecutionContext::iopRetC(PC& pc) {
4621 NEXT();
4622 uint soff = m_fp->m_soff;
4623 assert(!m_fp->m_func->isGenerator());
4625 // Call the runtime helpers to free the local variables and iterators
4626 frame_free_locals_inl(m_fp, m_fp->m_func->numLocals());
4627 ActRec* sfp = m_fp->arGetSfp();
4628 // Memcpy the the return value on top of the activation record. This works
4629 // the same regardless of whether the return value is boxed or not.
4630 TypedValue* retval_ptr = &m_fp->m_r;
4631 memcpy(retval_ptr, m_stack.topTV(), sizeof(TypedValue));
4632 // Adjust the stack
4633 m_stack.ndiscard(m_fp->m_func->numSlotsInFrame() + 1);
4635 if (LIKELY(sfp != m_fp)) {
4636 // Restore caller's execution state.
4637 m_fp = sfp;
4638 pc = m_fp->m_func->unit()->entry() + m_fp->m_func->base() + soff;
4639 m_stack.ret();
4640 assert(m_stack.topTV() == retval_ptr);
4641 } else {
4642 // No caller; terminate.
4643 m_stack.ret();
4644 #ifdef HPHP_TRACE
4646 std::ostringstream os;
4647 m_stack.toStringElm(os, m_stack.topTV(), m_fp);
4648 ONTRACE(1,
4649 Trace::trace("Return %s from VMExecutionContext::dispatch("
4650 "%p)\n", os.str().c_str(), m_fp));
4652 #endif
4653 pc = 0;
4657 inline void OPTBLD_INLINE VMExecutionContext::iopRetV(PC& pc) {
4658 iopRetC(pc);
4661 inline void OPTBLD_INLINE VMExecutionContext::iopUnwind(PC& pc) {
4662 assert(!m_faults.empty());
4663 assert(m_faults.back().m_savedRaiseOffset != kInvalidOffset);
4664 throw VMPrepareUnwind();
4667 inline void OPTBLD_INLINE VMExecutionContext::iopThrow(PC& pc) {
4668 Cell* c1 = m_stack.topC();
4669 if (c1->m_type != KindOfObject ||
4670 !static_cast<Instance*>(c1->m_data.pobj)->
4671 instanceof(SystemLib::s_ExceptionClass)) {
4672 raise_error("Exceptions must be valid objects derived from the "
4673 "Exception base class");
4676 Object obj(c1->m_data.pobj);
4677 m_stack.popC();
4678 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionHook(obj.get()));
4679 throw obj;
4682 inline void OPTBLD_INLINE VMExecutionContext::iopAGetC(PC& pc) {
4683 NEXT();
4684 TypedValue* tv = m_stack.topTV();
4685 lookupClsRef(tv, tv, true);
4688 inline void OPTBLD_INLINE VMExecutionContext::iopAGetL(PC& pc) {
4689 NEXT();
4690 DECODE_HA(local);
4691 TypedValue* top = m_stack.allocTV();
4692 TypedValue* fr = frame_local_inner(m_fp, local);
4693 lookupClsRef(fr, top);
4696 static void raise_undefined_local(ActRec* fp, Id pind) {
4697 assert(pind < fp->m_func->numNamedLocals());
4698 raise_notice(Strings::UNDEFINED_VARIABLE,
4699 fp->m_func->localVarName(pind)->data());
4702 static inline void cgetl_inner_body(TypedValue* fr, TypedValue* to) {
4703 assert(fr->m_type != KindOfUninit);
4704 tvDup(fr, to);
4705 if (to->m_type == KindOfRef) {
4706 tvUnbox(to);
4710 static inline void cgetl_body(ActRec* fp,
4711 TypedValue* fr,
4712 TypedValue* to,
4713 Id pind) {
4714 if (fr->m_type == KindOfUninit) {
4715 // `to' is uninitialized here, so we need to tvWriteNull before
4716 // possibly causing stack unwinding.
4717 tvWriteNull(to);
4718 raise_undefined_local(fp, pind);
4719 } else {
4720 cgetl_inner_body(fr, to);
4724 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL(PC& pc) {
4725 NEXT();
4726 DECODE_HA(local);
4727 Cell* to = m_stack.allocC();
4728 TypedValue* fr = frame_local(m_fp, local);
4729 cgetl_body(m_fp, fr, to, local);
4732 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL2(PC& pc) {
4733 NEXT();
4734 DECODE_HA(local);
4735 TypedValue* oldTop = m_stack.topTV();
4736 TypedValue* newTop = m_stack.allocTV();
4737 memcpy(newTop, oldTop, sizeof *newTop);
4738 Cell* to = oldTop;
4739 TypedValue* fr = frame_local(m_fp, local);
4740 cgetl_body(m_fp, fr, to, local);
4743 inline void OPTBLD_INLINE VMExecutionContext::iopCGetL3(PC& pc) {
4744 NEXT();
4745 DECODE_HA(local);
4746 TypedValue* oldTop = m_stack.topTV();
4747 TypedValue* oldSubTop = m_stack.indTV(1);
4748 TypedValue* newTop = m_stack.allocTV();
4749 memmove(newTop, oldTop, sizeof *oldTop * 2);
4750 Cell* to = oldSubTop;
4751 TypedValue* fr = frame_local(m_fp, local);
4752 cgetl_body(m_fp, fr, to, local);
4755 inline void OPTBLD_INLINE VMExecutionContext::iopCGetN(PC& pc) {
4756 NEXT();
4757 StringData* name;
4758 TypedValue* to = m_stack.topTV();
4759 TypedValue* fr = nullptr;
4760 lookup_var(m_fp, name, to, fr);
4761 if (fr == nullptr || fr->m_type == KindOfUninit) {
4762 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4763 tvRefcountedDecRefCell(to);
4764 tvWriteNull(to);
4765 } else {
4766 tvRefcountedDecRefCell(to);
4767 cgetl_inner_body(fr, to);
4769 decRefStr(name); // TODO(#1146727): leaks during exceptions
4772 inline void OPTBLD_INLINE VMExecutionContext::iopCGetG(PC& pc) {
4773 NEXT();
4774 StringData* name;
4775 TypedValue* to = m_stack.topTV();
4776 TypedValue* fr = nullptr;
4777 lookup_gbl(m_fp, name, to, fr);
4778 if (fr == nullptr) {
4779 if (MoreWarnings) {
4780 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4782 tvRefcountedDecRefCell(to);
4783 tvWriteNull(to);
4784 } else if (fr->m_type == KindOfUninit) {
4785 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
4786 tvRefcountedDecRefCell(to);
4787 tvWriteNull(to);
4788 } else {
4789 tvRefcountedDecRefCell(to);
4790 cgetl_inner_body(fr, to);
4792 decRefStr(name); // TODO(#1146727): leaks during exceptions
// Common prelude for static-property ops.  Expects a class reference on
// top of the stack with the property-name cell beneath it; resolves the
// property into `val` with visibility/accessibility flags.  The expanding
// function must declare `StringData* name` beforehand.
#define SPROP_OP_PRELUDE                                     \
  NEXT();                                                    \
  TypedValue* clsref = m_stack.topTV();                      \
  TypedValue* nameCell = m_stack.indTV(1);                   \
  TypedValue* output = nameCell;                             \
  TypedValue* val;                                           \
  bool visible, accessible;                                  \
  lookup_sprop(m_fp, clsref, name, nameCell, val, visible,   \
               accessible);

#define SPROP_OP_POSTLUDE \
  decRefStr(name);

// Shared body of CGetS/VGetS: reads the resolved static property into the
// output slot (boxing and binding it when `box` is true) and pops the
// class reference.
#define GETS(box) do {                                       \
  SPROP_OP_PRELUDE                                           \
  if (!(visible && accessible)) {                            \
    raise_error("Invalid static property access: %s::%s",    \
                clsref->m_data.pcls->name()->data(),         \
                name->data());                               \
  }                                                          \
  if (box) {                                                 \
    if (val->m_type != KindOfRef) {                          \
      tvBox(val);                                            \
    }                                                        \
    tvDupVar(val, output);                                   \
  } else {                                                   \
    tvReadCell(val, output);                                 \
  }                                                          \
  m_stack.popA();                                            \
  SPROP_OP_POSTLUDE                                          \
} while (0)
4827 inline void OPTBLD_INLINE VMExecutionContext::iopCGetS(PC& pc) {
4828 StringData* name;
4829 GETS(false);
4830 if (shouldProfile() && name && name->isStatic()) {
4831 recordType(TypeProfileKey(TypeProfileKey::StaticPropName, name),
4832 m_stack.top()->m_type);
4836 inline void OPTBLD_INLINE VMExecutionContext::iopCGetM(PC& pc) {
4837 PC oldPC = pc;
4838 NEXT();
4839 DECLARE_GETHELPER_ARGS
4840 getHelper(GETHELPER_ARGS);
4841 if (tvRet->m_type == KindOfRef) {
4842 tvUnbox(tvRet);
4844 assert(hasImmVector(*oldPC));
4845 const ImmVector& immVec = ImmVector::createFromStream(oldPC + 1);
4846 StringData* name;
4847 MemberCode mc;
4848 if (immVec.decodeLastMember(curUnit(), name, mc)) {
4849 recordType(TypeProfileKey(mc, name), m_stack.top()->m_type);
4853 static inline void vgetl_body(TypedValue* fr, TypedValue* to) {
4854 if (fr->m_type != KindOfRef) {
4855 tvBox(fr);
4857 tvDup(fr, to);
4860 inline void OPTBLD_INLINE VMExecutionContext::iopVGetL(PC& pc) {
4861 NEXT();
4862 DECODE_HA(local);
4863 Var* to = m_stack.allocV();
4864 TypedValue* fr = frame_local(m_fp, local);
4865 vgetl_body(fr, to);
4868 inline void OPTBLD_INLINE VMExecutionContext::iopVGetN(PC& pc) {
4869 NEXT();
4870 StringData* name;
4871 TypedValue* to = m_stack.topTV();
4872 TypedValue* fr = nullptr;
4873 lookupd_var(m_fp, name, to, fr);
4874 assert(fr != nullptr);
4875 tvRefcountedDecRefCell(to);
4876 vgetl_body(fr, to);
4877 decRefStr(name);
4880 inline void OPTBLD_INLINE VMExecutionContext::iopVGetG(PC& pc) {
4881 NEXT();
4882 StringData* name;
4883 TypedValue* to = m_stack.topTV();
4884 TypedValue* fr = nullptr;
4885 lookupd_gbl(m_fp, name, to, fr);
4886 assert(fr != nullptr);
4887 tvRefcountedDecRefCell(to);
4888 vgetl_body(fr, to);
4889 decRefStr(name);
4892 inline void OPTBLD_INLINE VMExecutionContext::iopVGetS(PC& pc) {
4893 StringData* name;
4894 GETS(true);
4896 #undef GETS
4898 inline void OPTBLD_INLINE VMExecutionContext::iopVGetM(PC& pc) {
4899 NEXT();
4900 DECLARE_SETHELPER_ARGS
4901 TypedValue* tv1 = m_stack.allocTV();
4902 tvWriteUninit(tv1);
4903 if (!setHelperPre<false, true, false, true, 1,
4904 ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
4905 if (base->m_type != KindOfRef) {
4906 tvBox(base);
4908 tvDupVar(base, tv1);
4909 } else {
4910 tvWriteNull(tv1);
4911 tvBox(tv1);
4913 setHelperPost<1>(SETHELPERPOST_ARGS);
4916 inline void OPTBLD_INLINE VMExecutionContext::iopIssetN(PC& pc) {
4917 NEXT();
4918 StringData* name;
4919 TypedValue* tv1 = m_stack.topTV();
4920 TypedValue* tv = nullptr;
4921 bool e;
4922 lookup_var(m_fp, name, tv1, tv);
4923 if (tv == nullptr) {
4924 e = false;
4925 } else {
4926 e = isset(tvAsCVarRef(tv));
4928 tvRefcountedDecRefCell(tv1);
4929 tv1->m_data.num = e;
4930 tv1->m_type = KindOfBoolean;
4931 decRefStr(name);
4934 inline void OPTBLD_INLINE VMExecutionContext::iopIssetG(PC& pc) {
4935 NEXT();
4936 StringData* name;
4937 TypedValue* tv1 = m_stack.topTV();
4938 TypedValue* tv = nullptr;
4939 bool e;
4940 lookup_gbl(m_fp, name, tv1, tv);
4941 if (tv == nullptr) {
4942 e = false;
4943 } else {
4944 e = isset(tvAsCVarRef(tv));
4946 tvRefcountedDecRefCell(tv1);
4947 tv1->m_data.num = e;
4948 tv1->m_type = KindOfBoolean;
4949 decRefStr(name);
4952 inline void OPTBLD_INLINE VMExecutionContext::iopIssetS(PC& pc) {
4953 StringData* name;
4954 SPROP_OP_PRELUDE
4955 bool e;
4956 if (!(visible && accessible)) {
4957 e = false;
4958 } else {
4959 e = isset(tvAsCVarRef(val));
4961 m_stack.popA();
4962 output->m_data.num = e;
4963 output->m_type = KindOfBoolean;
4964 SPROP_OP_POSTLUDE
4967 inline void OPTBLD_INLINE VMExecutionContext::iopIssetM(PC& pc) {
4968 NEXT();
4969 DECLARE_GETHELPER_ARGS
4970 getHelperPre<false, false, LeaveLast>(MEMBERHELPERPRE_ARGS);
4971 // Process last member specially, in order to employ the IssetElem/IssetProp
4972 // operations. (TODO combine with EmptyM.)
4973 bool issetResult = false;
4974 switch (mcode) {
4975 case MEL:
4976 case MEC:
4977 case MET:
4978 case MEI: {
4979 issetResult = IssetEmptyElem<false>(tvScratch, tvRef, base, curMember);
4980 break;
4982 case MPL:
4983 case MPC:
4984 case MPT: {
4985 Class* ctx = arGetContextClass(m_fp);
4986 issetResult = IssetEmptyProp<false>(ctx, base, curMember);
4987 break;
4989 default: assert(false);
4991 getHelperPost<false>(GETHELPERPOST_ARGS);
4992 tvRet->m_data.num = issetResult;
4993 tvRet->m_type = KindOfBoolean;
4996 #define IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
4997 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## L(PC& pc) { \
4998 NEXT(); \
4999 DECODE_HA(local); \
5000 TypedValue* tv = frame_local(m_fp, local); \
5001 if (checkInit && tv->m_type == KindOfUninit) { \
5002 raise_undefined_local(m_fp, local); \
5004 bool ret = predicate(tvAsCVarRef(tv)); \
5005 TypedValue* topTv = m_stack.allocTV(); \
5006 topTv->m_data.num = ret; \
5007 topTv->m_type = KindOfBoolean; \
5010 #define IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
5011 inline void OPTBLD_INLINE VMExecutionContext::iopIs ## what ## C(PC& pc) { \
5012 NEXT(); \
5013 TypedValue* topTv = m_stack.topTV(); \
5014 assert(topTv->m_type != KindOfRef); \
5015 bool ret = predicate(tvAsCVarRef(topTv)); \
5016 tvRefcountedDecRefCell(topTv); \
5017 topTv->m_data.num = ret; \
5018 topTv->m_type = KindOfBoolean; \
5021 #define IOP_TYPE_CHECK_INSTR(checkInit, what, predicate) \
5022 IOP_TYPE_CHECK_INSTR_L(checkInit, what, predicate) \
5023 IOP_TYPE_CHECK_INSTR_C(checkInit, what, predicate) \
5025 IOP_TYPE_CHECK_INSTR_L(false, set, isset)
5026 IOP_TYPE_CHECK_INSTR(true, Null, is_null)
5027 IOP_TYPE_CHECK_INSTR(true, Array, is_array)
5028 IOP_TYPE_CHECK_INSTR(true, String, is_string)
5029 IOP_TYPE_CHECK_INSTR(true, Object, is_object)
5030 IOP_TYPE_CHECK_INSTR(true, Int, is_int)
5031 IOP_TYPE_CHECK_INSTR(true, Double, is_double)
5032 IOP_TYPE_CHECK_INSTR(true, Bool, is_bool)
5033 #undef IOP_TYPE_CHECK_INSTR
5035 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyL(PC& pc) {
5036 NEXT();
5037 DECODE_HA(local);
5038 TypedValue* loc = frame_local(m_fp, local);
5039 bool e = empty(tvAsCVarRef(loc));
5040 TypedValue* tv1 = m_stack.allocTV();
5041 tv1->m_data.num = e;
5042 tv1->m_type = KindOfBoolean;
5045 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyN(PC& pc) {
5046 NEXT();
5047 StringData* name;
5048 TypedValue* tv1 = m_stack.topTV();
5049 TypedValue* tv = nullptr;
5050 bool e;
5051 lookup_var(m_fp, name, tv1, tv);
5052 if (tv == nullptr) {
5053 e = true;
5054 } else {
5055 e = empty(tvAsCVarRef(tv));
5057 tvRefcountedDecRefCell(tv1);
5058 tv1->m_data.num = e;
5059 tv1->m_type = KindOfBoolean;
5060 decRefStr(name);
5063 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyG(PC& pc) {
5064 NEXT();
5065 StringData* name;
5066 TypedValue* tv1 = m_stack.topTV();
5067 TypedValue* tv = nullptr;
5068 bool e;
5069 lookup_gbl(m_fp, name, tv1, tv);
5070 if (tv == nullptr) {
5071 e = true;
5072 } else {
5073 e = empty(tvAsCVarRef(tv));
5075 tvRefcountedDecRefCell(tv1);
5076 tv1->m_data.num = e;
5077 tv1->m_type = KindOfBoolean;
5078 decRefStr(name);
5081 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyS(PC& pc) {
5082 StringData* name;
5083 SPROP_OP_PRELUDE
5084 bool e;
5085 if (!(visible && accessible)) {
5086 e = true;
5087 } else {
5088 e = empty(tvAsCVarRef(val));
5090 m_stack.popA();
5091 output->m_data.num = e;
5092 output->m_type = KindOfBoolean;
5093 SPROP_OP_POSTLUDE
5096 inline void OPTBLD_INLINE VMExecutionContext::iopEmptyM(PC& pc) {
5097 NEXT();
5098 DECLARE_GETHELPER_ARGS
5099 getHelperPre<false, false, LeaveLast>(MEMBERHELPERPRE_ARGS);
5100 // Process last member specially, in order to employ the EmptyElem/EmptyProp
5101 // operations. (TODO combine with IssetM)
5102 bool emptyResult = false;
5103 switch (mcode) {
5104 case MEL:
5105 case MEC:
5106 case MET:
5107 case MEI: {
5108 emptyResult = IssetEmptyElem<true>(tvScratch, tvRef, base, curMember);
5109 break;
5111 case MPL:
5112 case MPC:
5113 case MPT: {
5114 Class* ctx = arGetContextClass(m_fp);
5115 emptyResult = IssetEmptyProp<true>(ctx, base, curMember);
5116 break;
5118 default: assert(false);
5120 getHelperPost<false>(GETHELPERPOST_ARGS);
5121 tvRet->m_data.num = emptyResult;
5122 tvRet->m_type = KindOfBoolean;
5125 inline void OPTBLD_INLINE VMExecutionContext::iopAKExists(PC& pc) {
5126 NEXT();
5127 TypedValue* arr = m_stack.topTV();
5128 TypedValue* key = arr + 1;
5129 bool result = f_array_key_exists(tvAsCVarRef(key), tvAsCVarRef(arr));
5130 m_stack.popTV();
5131 tvRefcountedDecRef(key);
5132 key->m_data.num = result;
5133 key->m_type = KindOfBoolean;
5136 inline void OPTBLD_INLINE VMExecutionContext::iopArrayIdx(PC& pc) {
5137 NEXT();
5138 TypedValue* def = m_stack.topTV();
5139 TypedValue* arr = m_stack.indTV(1);
5140 TypedValue* key = m_stack.indTV(2);
5142 Variant result = f_hphp_array_idx(tvAsCVarRef(key),
5143 tvAsCVarRef(arr),
5144 tvAsCVarRef(def));
5145 m_stack.popTV();
5146 m_stack.popTV();
5147 tvAsVariant(key) = result;
5150 inline void OPTBLD_INLINE VMExecutionContext::iopSetL(PC& pc) {
5151 NEXT();
5152 DECODE_HA(local);
5153 assert(local < m_fp->m_func->numLocals());
5154 Cell* fr = m_stack.topC();
5155 TypedValue* to = frame_local(m_fp, local);
5156 tvSet(fr, to);
5159 inline void OPTBLD_INLINE VMExecutionContext::iopSetN(PC& pc) {
5160 NEXT();
5161 StringData* name;
5162 Cell* fr = m_stack.topC();
5163 TypedValue* tv2 = m_stack.indTV(1);
5164 TypedValue* to = nullptr;
5165 lookupd_var(m_fp, name, tv2, to);
5166 assert(to != nullptr);
5167 tvSet(fr, to);
5168 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
5169 m_stack.discard();
5170 decRefStr(name);
5173 inline void OPTBLD_INLINE VMExecutionContext::iopSetG(PC& pc) {
5174 NEXT();
5175 StringData* name;
5176 Cell* fr = m_stack.topC();
5177 TypedValue* tv2 = m_stack.indTV(1);
5178 TypedValue* to = nullptr;
5179 lookupd_gbl(m_fp, name, tv2, to);
5180 assert(to != nullptr);
5181 tvSet(fr, to);
5182 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
5183 m_stack.discard();
5184 decRefStr(name);
5187 inline void OPTBLD_INLINE VMExecutionContext::iopSetS(PC& pc) {
5188 NEXT();
5189 TypedValue* tv1 = m_stack.topTV();
5190 TypedValue* classref = m_stack.indTV(1);
5191 TypedValue* propn = m_stack.indTV(2);
5192 TypedValue* output = propn;
5193 StringData* name;
5194 TypedValue* val;
5195 bool visible, accessible;
5196 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
5197 if (!(visible && accessible)) {
5198 raise_error("Invalid static property access: %s::%s",
5199 classref->m_data.pcls->name()->data(),
5200 name->data());
5202 tvSet(tv1, val);
5203 tvRefcountedDecRefCell(propn);
5204 memcpy(output, tv1, sizeof(TypedValue));
5205 m_stack.ndiscard(2);
5206 decRefStr(name);
5209 inline void OPTBLD_INLINE VMExecutionContext::iopSetM(PC& pc) {
5210 NEXT();
5211 DECLARE_SETHELPER_ARGS
5212 if (!setHelperPre<false, true, false, false, 1,
5213 LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5214 Cell* c1 = m_stack.topC();
5216 if (mcode == MW) {
5217 SetNewElem<true>(base, c1);
5218 } else {
5219 switch (mcode) {
5220 case MEL:
5221 case MEC:
5222 case MET:
5223 case MEI: {
5224 StringData* result = SetElem<true>(base, curMember, c1);
5225 if (result) {
5226 tvRefcountedDecRefCell(c1);
5227 c1->m_type = KindOfString;
5228 c1->m_data.pstr = result;
5230 break;
5232 case MPL:
5233 case MPC:
5234 case MPT: {
5235 Class* ctx = arGetContextClass(m_fp);
5236 SetProp<true>(ctx, base, curMember, c1);
5237 break;
5239 default: assert(false);
5243 setHelperPost<1>(SETHELPERPOST_ARGS);
5246 inline void OPTBLD_INLINE VMExecutionContext::iopSetWithRefLM(PC& pc) {
5247 NEXT();
5248 DECLARE_SETHELPER_ARGS
5249 bool skip = setHelperPre<false, true, false, false, 0,
5250 ConsumeAll>(MEMBERHELPERPRE_ARGS);
5251 DECODE_HA(local);
5252 if (!skip) {
5253 TypedValue* from = frame_local(m_fp, local);
5254 tvAsVariant(base) = withRefBind(tvAsVariant(from));
5256 setHelperPost<0>(SETHELPERPOST_ARGS);
5259 inline void OPTBLD_INLINE VMExecutionContext::iopSetWithRefRM(PC& pc) {
5260 NEXT();
5261 DECLARE_SETHELPER_ARGS
5262 bool skip = setHelperPre<false, true, false, false, 1,
5263 ConsumeAll>(MEMBERHELPERPRE_ARGS);
5264 if (!skip) {
5265 TypedValue* from = m_stack.top();
5266 tvAsVariant(base) = withRefBind(tvAsVariant(from));
5268 setHelperPost<0>(SETHELPERPOST_ARGS);
5269 m_stack.popTV();
5272 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpL(PC& pc) {
5273 NEXT();
5274 DECODE_HA(local);
5275 DECODE(unsigned char, op);
5276 Cell* fr = m_stack.topC();
5277 TypedValue* to = frame_local(m_fp, local);
5278 SETOP_BODY(to, op, fr);
5279 tvRefcountedDecRefCell(fr);
5280 tvReadCell(to, fr);
5283 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpN(PC& pc) {
5284 NEXT();
5285 DECODE(unsigned char, op);
5286 StringData* name;
5287 Cell* fr = m_stack.topC();
5288 TypedValue* tv2 = m_stack.indTV(1);
5289 TypedValue* to = nullptr;
5290 // XXX We're probably not getting warnings totally correct here
5291 lookupd_var(m_fp, name, tv2, to);
5292 assert(to != nullptr);
5293 SETOP_BODY(to, op, fr);
5294 tvRefcountedDecRef(fr);
5295 tvRefcountedDecRef(tv2);
5296 tvReadCell(to, tv2);
5297 m_stack.discard();
5298 decRefStr(name);
5301 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpG(PC& pc) {
5302 NEXT();
5303 DECODE(unsigned char, op);
5304 StringData* name;
5305 Cell* fr = m_stack.topC();
5306 TypedValue* tv2 = m_stack.indTV(1);
5307 TypedValue* to = nullptr;
5308 // XXX We're probably not getting warnings totally correct here
5309 lookupd_gbl(m_fp, name, tv2, to);
5310 assert(to != nullptr);
5311 SETOP_BODY(to, op, fr);
5312 tvRefcountedDecRef(fr);
5313 tvRefcountedDecRef(tv2);
5314 tvReadCell(to, tv2);
5315 m_stack.discard();
5316 decRefStr(name);
5319 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpS(PC& pc) {
5320 NEXT();
5321 DECODE(unsigned char, op);
5322 Cell* fr = m_stack.topC();
5323 TypedValue* classref = m_stack.indTV(1);
5324 TypedValue* propn = m_stack.indTV(2);
5325 TypedValue* output = propn;
5326 StringData* name;
5327 TypedValue* val;
5328 bool visible, accessible;
5329 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
5330 if (!(visible && accessible)) {
5331 raise_error("Invalid static property access: %s::%s",
5332 classref->m_data.pcls->name()->data(),
5333 name->data());
5335 SETOP_BODY(val, op, fr);
5336 tvRefcountedDecRefCell(propn);
5337 tvRefcountedDecRef(fr);
5338 tvReadCell(val, output);
5339 m_stack.ndiscard(2);
5340 decRefStr(name);
5343 inline void OPTBLD_INLINE VMExecutionContext::iopSetOpM(PC& pc) {
5344 NEXT();
5345 DECODE(unsigned char, op);
5346 DECLARE_SETHELPER_ARGS
5347 if (!setHelperPre<MoreWarnings, true, false, false, 1,
5348 LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5349 TypedValue* result;
5350 Cell* rhs = m_stack.topC();
5352 if (mcode == MW) {
5353 result = SetOpNewElem(tvScratch, tvRef, op, base, rhs);
5354 } else {
5355 switch (mcode) {
5356 case MEL:
5357 case MEC:
5358 case MET:
5359 case MEI:
5360 result = SetOpElem(tvScratch, tvRef, op, base, curMember, rhs);
5361 break;
5362 case MPL:
5363 case MPC:
5364 case MPT: {
5365 Class *ctx = arGetContextClass(m_fp);
5366 result = SetOpProp(tvScratch, tvRef, ctx, op, base, curMember, rhs);
5367 break;
5369 default:
5370 assert(false);
5371 result = nullptr; // Silence compiler warning.
5375 tvRefcountedDecRef(rhs);
5376 tvReadCell(result, rhs);
5378 setHelperPost<1>(SETHELPERPOST_ARGS);
5381 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecL(PC& pc) {
5382 NEXT();
5383 DECODE_HA(local);
5384 DECODE(unsigned char, op);
5385 TypedValue* to = m_stack.allocTV();
5386 tvWriteUninit(to);
5387 TypedValue* fr = frame_local(m_fp, local);
5388 IncDecBody<true>(op, fr, to);
5391 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecN(PC& pc) {
5392 NEXT();
5393 DECODE(unsigned char, op);
5394 StringData* name;
5395 TypedValue* nameCell = m_stack.topTV();
5396 TypedValue* local = nullptr;
5397 // XXX We're probably not getting warnings totally correct here
5398 lookupd_var(m_fp, name, nameCell, local);
5399 assert(local != nullptr);
5400 IncDecBody<true>(op, local, nameCell);
5401 decRefStr(name);
5404 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecG(PC& pc) {
5405 NEXT();
5406 DECODE(unsigned char, op);
5407 StringData* name;
5408 TypedValue* nameCell = m_stack.topTV();
5409 TypedValue* gbl = nullptr;
5410 // XXX We're probably not getting warnings totally correct here
5411 lookupd_gbl(m_fp, name, nameCell, gbl);
5412 assert(gbl != nullptr);
5413 IncDecBody<true>(op, gbl, nameCell);
5414 decRefStr(name);
5417 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecS(PC& pc) {
5418 StringData* name;
5419 SPROP_OP_PRELUDE
5420 DECODE(unsigned char, op);
5421 if (!(visible && accessible)) {
5422 raise_error("Invalid static property access: %s::%s",
5423 clsref->m_data.pcls->name()->data(),
5424 name->data());
5426 tvRefcountedDecRefCell(nameCell);
5427 IncDecBody<true>(op, val, output);
5428 m_stack.discard();
5429 SPROP_OP_POSTLUDE
5432 inline void OPTBLD_INLINE VMExecutionContext::iopIncDecM(PC& pc) {
5433 NEXT();
5434 DECODE(unsigned char, op);
5435 DECLARE_SETHELPER_ARGS
5436 TypedValue to;
5437 tvWriteUninit(&to);
5438 if (!setHelperPre<MoreWarnings, true, false, false, 0,
5439 LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5440 if (mcode == MW) {
5441 IncDecNewElem<true>(tvScratch, tvRef, op, base, to);
5442 } else {
5443 switch (mcode) {
5444 case MEL:
5445 case MEC:
5446 case MET:
5447 case MEI:
5448 IncDecElem<true>(tvScratch, tvRef, op, base, curMember, to);
5449 break;
5450 case MPL:
5451 case MPC:
5452 case MPT: {
5453 Class* ctx = arGetContextClass(m_fp);
5454 IncDecProp<true>(tvScratch, tvRef, ctx, op, base, curMember, to);
5455 break;
5457 default: assert(false);
5461 setHelperPost<0>(SETHELPERPOST_ARGS);
5462 Cell* c1 = m_stack.allocC();
5463 memcpy(c1, &to, sizeof(TypedValue));
5466 inline void OPTBLD_INLINE VMExecutionContext::iopBindL(PC& pc) {
5467 NEXT();
5468 DECODE_HA(local);
5469 Var* fr = m_stack.topV();
5470 TypedValue* to = frame_local(m_fp, local);
5471 tvBind(fr, to);
5474 inline void OPTBLD_INLINE VMExecutionContext::iopBindN(PC& pc) {
5475 NEXT();
5476 StringData* name;
5477 TypedValue* fr = m_stack.topTV();
5478 TypedValue* nameTV = m_stack.indTV(1);
5479 TypedValue* to = nullptr;
5480 lookupd_var(m_fp, name, nameTV, to);
5481 assert(to != nullptr);
5482 tvBind(fr, to);
5483 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
5484 m_stack.discard();
5485 decRefStr(name);
5488 inline void OPTBLD_INLINE VMExecutionContext::iopBindG(PC& pc) {
5489 NEXT();
5490 StringData* name;
5491 TypedValue* fr = m_stack.topTV();
5492 TypedValue* nameTV = m_stack.indTV(1);
5493 TypedValue* to = nullptr;
5494 lookupd_gbl(m_fp, name, nameTV, to);
5495 assert(to != nullptr);
5496 tvBind(fr, to);
5497 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
5498 m_stack.discard();
5499 decRefStr(name);
5502 inline void OPTBLD_INLINE VMExecutionContext::iopBindS(PC& pc) {
5503 NEXT();
5504 TypedValue* fr = m_stack.topTV();
5505 TypedValue* classref = m_stack.indTV(1);
5506 TypedValue* propn = m_stack.indTV(2);
5507 TypedValue* output = propn;
5508 StringData* name;
5509 TypedValue* val;
5510 bool visible, accessible;
5511 lookup_sprop(m_fp, classref, name, propn, val, visible, accessible);
5512 if (!(visible && accessible)) {
5513 raise_error("Invalid static property access: %s::%s",
5514 classref->m_data.pcls->name()->data(),
5515 name->data());
5517 tvBind(fr, val);
5518 tvRefcountedDecRefCell(propn);
5519 memcpy(output, fr, sizeof(TypedValue));
5520 m_stack.ndiscard(2);
5521 decRefStr(name);
5524 inline void OPTBLD_INLINE VMExecutionContext::iopBindM(PC& pc) {
5525 NEXT();
5526 DECLARE_SETHELPER_ARGS
5527 TypedValue* tv1 = m_stack.topTV();
5528 if (!setHelperPre<false, true, false, true, 1,
5529 ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
5530 // Bind the element/property with the var on the top of the stack
5531 tvBind(tv1, base);
5533 setHelperPost<1>(SETHELPERPOST_ARGS);
5536 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetL(PC& pc) {
5537 NEXT();
5538 DECODE_HA(local);
5539 assert(local < m_fp->m_func->numLocals());
5540 TypedValue* tv = frame_local(m_fp, local);
5541 tvRefcountedDecRef(tv);
5542 tvWriteUninit(tv);
5545 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetN(PC& pc) {
5546 NEXT();
5547 StringData* name;
5548 TypedValue* tv1 = m_stack.topTV();
5549 TypedValue* tv = nullptr;
5550 lookup_var(m_fp, name, tv1, tv);
5551 assert(!m_fp->hasInvName());
5552 if (tv != nullptr) {
5553 tvRefcountedDecRef(tv);
5554 tvWriteUninit(tv);
5556 m_stack.popC();
5557 decRefStr(name);
5560 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetG(PC& pc) {
5561 NEXT();
5562 TypedValue* tv1 = m_stack.topTV();
5563 StringData* name = lookup_name(tv1);
5564 VarEnv* varEnv = m_globalVarEnv;
5565 assert(varEnv != nullptr);
5566 varEnv->unset(name);
5567 m_stack.popC();
5568 decRefStr(name);
5571 inline void OPTBLD_INLINE VMExecutionContext::iopUnsetM(PC& pc) {
5572 NEXT();
5573 DECLARE_SETHELPER_ARGS
5574 if (!setHelperPre<false, false, true, false, 0,
5575 LeaveLast>(MEMBERHELPERPRE_ARGS)) {
5576 switch (mcode) {
5577 case MEL:
5578 case MEC:
5579 case MET:
5580 case MEI:
5581 UnsetElem(base, curMember);
5582 break;
5583 case MPL:
5584 case MPC:
5585 case MPT: {
5586 Class* ctx = arGetContextClass(m_fp);
5587 UnsetProp(ctx, base, curMember);
5588 break;
5590 default: assert(false);
5593 setHelperPost<0>(SETHELPERPOST_ARGS);
5596 inline ActRec* OPTBLD_INLINE VMExecutionContext::fPushFuncImpl(
5597 const Func* func,
5598 int numArgs) {
5599 DEBUGGER_IF(phpBreakpointEnabled(func->name()->data()));
5600 ActRec* ar = m_stack.allocA();
5601 arSetSfp(ar, m_fp);
5602 ar->m_func = func;
5603 ar->initNumArgs(numArgs);
5604 ar->setVarEnv(nullptr);
5605 return ar;
5608 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFunc(PC& pc) {
5609 NEXT();
5610 DECODE_IVA(numArgs);
5611 Cell* c1 = m_stack.topC();
5612 const Func* func = nullptr;
5613 ObjectData* origObj = nullptr;
5614 StringData* origSd = nullptr;
5615 if (IS_STRING_TYPE(c1->m_type)) {
5616 origSd = c1->m_data.pstr;
5617 func = Unit::loadFunc(origSd);
5618 } else if (c1->m_type == KindOfObject) {
5619 static StringData* invokeName = StringData::GetStaticString("__invoke");
5620 origObj = c1->m_data.pobj;
5621 const Class* cls = origObj->getVMClass();
5622 func = cls->lookupMethod(invokeName);
5623 if (func == nullptr) {
5624 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5626 } else {
5627 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5629 if (func == nullptr) {
5630 raise_error("Call to undefined function %s()", c1->m_data.pstr->data());
5632 assert(!origObj || !origSd);
5633 assert(origObj || origSd);
5634 // We've already saved origObj or origSd; we'll use them after
5635 // overwriting the pointer on the stack. Don't refcount it now; defer
5636 // till after we're done with it.
5637 m_stack.discard();
5638 ActRec* ar = fPushFuncImpl(func, numArgs);
5639 if (origObj) {
5640 if (func->attrs() & AttrStatic && !func->isClosureBody()) {
5641 ar->setClass(origObj->getVMClass());
5642 decRefObj(origObj);
5643 } else {
5644 ar->setThis(origObj);
5645 // Teleport the reference from the destroyed stack cell to the
5646 // ActRec. Don't try this at home.
5648 } else {
5649 ar->setThis(nullptr);
5650 decRefStr(origSd);
5654 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFuncD(PC& pc) {
5655 NEXT();
5656 DECODE_IVA(numArgs);
5657 DECODE(Id, id);
5658 const NamedEntityPair nep = m_fp->m_func->unit()->lookupNamedEntityPairId(id);
5659 Func* func = Unit::loadFunc(nep.second, nep.first);
5660 if (func == nullptr) {
5661 raise_error("Call to undefined function %s()",
5662 m_fp->m_func->unit()->lookupLitstrId(id)->data());
5664 ActRec* ar = fPushFuncImpl(func, numArgs);
5665 ar->setThis(nullptr);
5668 inline void OPTBLD_INLINE VMExecutionContext::iopFPushFuncU(PC& pc) {
5669 NEXT();
5670 DECODE_IVA(numArgs);
5671 DECODE(Id, nsFunc);
5672 DECODE(Id, globalFunc);
5673 Unit* unit = m_fp->m_func->unit();
5674 const NamedEntityPair nep = unit->lookupNamedEntityPairId(nsFunc);
5675 Func* func = Unit::loadFunc(nep.second, nep.first);
5676 if (func == nullptr) {
5677 const NamedEntityPair nep2 = unit->lookupNamedEntityPairId(globalFunc);
5678 func = Unit::loadFunc(nep2.second, nep2.first);
5679 if (func == nullptr) {
5680 const char *funcName = unit->lookupLitstrId(nsFunc)->data();
5681 raise_error("Call to undefined function %s()", funcName);
5684 ActRec* ar = fPushFuncImpl(func, numArgs);
5685 ar->setThis(nullptr);
5688 void VMExecutionContext::fPushObjMethodImpl(
5689 Class* cls, StringData* name, ObjectData* obj, int numArgs) {
5690 const Func* f;
5691 LookupResult res = lookupObjMethod(f, cls, name, true);
5692 assert(f);
5693 ActRec* ar = m_stack.allocA();
5694 arSetSfp(ar, m_fp);
5695 ar->m_func = f;
5696 if (res == MethodFoundNoThis) {
5697 decRefObj(obj);
5698 ar->setClass(cls);
5699 } else {
5700 assert(res == MethodFoundWithThis || res == MagicCallFound);
5701 /* Transfer ownership of obj to the ActRec*/
5702 ar->setThis(obj);
5704 ar->initNumArgs(numArgs);
5705 if (res == MagicCallFound) {
5706 ar->setInvName(name);
5707 } else {
5708 ar->setVarEnv(NULL);
5709 decRefStr(name);
5713 inline void OPTBLD_INLINE VMExecutionContext::iopFPushObjMethod(PC& pc) {
5714 NEXT();
5715 DECODE_IVA(numArgs);
5716 Cell* c1 = m_stack.topC(); // Method name.
5717 if (!IS_STRING_TYPE(c1->m_type)) {
5718 raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
5720 Cell* c2 = m_stack.indC(1); // Object.
5721 if (c2->m_type != KindOfObject) {
5722 throw_call_non_object(c1->m_data.pstr->data());
5724 ObjectData* obj = c2->m_data.pobj;
5725 Class* cls = obj->getVMClass();
5726 StringData* name = c1->m_data.pstr;
5727 // We handle decReffing obj and name in fPushObjMethodImpl
5728 m_stack.ndiscard(2);
5729 fPushObjMethodImpl(cls, name, obj, numArgs);
5732 inline void OPTBLD_INLINE VMExecutionContext::iopFPushObjMethodD(PC& pc) {
5733 NEXT();
5734 DECODE_IVA(numArgs);
5735 DECODE_LITSTR(name);
5736 Cell* c1 = m_stack.topC();
5737 if (c1->m_type != KindOfObject) {
5738 throw_call_non_object(name->data());
5740 ObjectData* obj = c1->m_data.pobj;
5741 Class* cls = obj->getVMClass();
5742 // We handle decReffing obj in fPushObjMethodImpl
5743 m_stack.discard();
5744 fPushObjMethodImpl(cls, name, obj, numArgs);
5747 template<bool forwarding>
5748 void VMExecutionContext::pushClsMethodImpl(Class* cls,
5749 StringData* name,
5750 ObjectData* obj,
5751 int numArgs) {
5752 const Func* f;
5753 LookupResult res = lookupClsMethod(f, cls, name, obj, true);
5754 if (res == MethodFoundNoThis || res == MagicCallStaticFound) {
5755 obj = nullptr;
5756 } else {
5757 assert(obj);
5758 assert(res == MethodFoundWithThis || res == MagicCallFound);
5759 obj->incRefCount();
5761 assert(f);
5762 ActRec* ar = m_stack.allocA();
5763 arSetSfp(ar, m_fp);
5764 ar->m_func = f;
5765 if (obj) {
5766 ar->setThis(obj);
5767 } else {
5768 if (!forwarding) {
5769 ar->setClass(cls);
5770 } else {
5771 /* Propogate the current late bound class if there is one, */
5772 /* otherwise use the class given by this instruction's input */
5773 if (m_fp->hasThis()) {
5774 cls = m_fp->getThis()->getVMClass();
5775 } else if (m_fp->hasClass()) {
5776 cls = m_fp->getClass();
5778 ar->setClass(cls);
5781 ar->initNumArgs(numArgs);
5782 if (res == MagicCallFound || res == MagicCallStaticFound) {
5783 ar->setInvName(name);
5784 } else {
5785 ar->setVarEnv(nullptr);
5786 decRefStr(const_cast<StringData*>(name));
5790 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethod(PC& pc) {
5791 NEXT();
5792 DECODE_IVA(numArgs);
5793 Cell* c1 = m_stack.indC(1); // Method name.
5794 if (!IS_STRING_TYPE(c1->m_type)) {
5795 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5797 TypedValue* tv = m_stack.top();
5798 assert(tv->m_type == KindOfClass);
5799 Class* cls = tv->m_data.pcls;
5800 StringData* name = c1->m_data.pstr;
5801 // CLSMETHOD_BODY will take care of decReffing name
5802 m_stack.ndiscard(2);
5803 assert(cls && name);
5804 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5805 pushClsMethodImpl<false>(cls, name, obj, numArgs);
5808 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethodD(PC& pc) {
5809 NEXT();
5810 DECODE_IVA(numArgs);
5811 DECODE_LITSTR(name);
5812 DECODE(Id, classId);
5813 const NamedEntityPair &nep =
5814 m_fp->m_func->unit()->lookupNamedEntityPairId(classId);
5815 Class* cls = Unit::loadClass(nep.second, nep.first);
5816 if (cls == nullptr) {
5817 raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
5819 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5820 pushClsMethodImpl<false>(cls, name, obj, numArgs);
5823 inline void OPTBLD_INLINE VMExecutionContext::iopFPushClsMethodF(PC& pc) {
5824 NEXT();
5825 DECODE_IVA(numArgs);
5826 Cell* c1 = m_stack.indC(1); // Method name.
5827 if (!IS_STRING_TYPE(c1->m_type)) {
5828 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5830 TypedValue* tv = m_stack.top();
5831 assert(tv->m_type == KindOfClass);
5832 Class* cls = tv->m_data.pcls;
5833 assert(cls);
5834 StringData* name = c1->m_data.pstr;
5835 // CLSMETHOD_BODY will take care of decReffing name
5836 m_stack.ndiscard(2);
5837 ObjectData* obj = m_fp->hasThis() ? m_fp->getThis() : nullptr;
5838 pushClsMethodImpl<true>(cls, name, obj, numArgs);
5841 #undef CLSMETHOD_BODY
5843 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCtor(PC& pc) {
5844 NEXT();
5845 DECODE_IVA(numArgs);
5846 TypedValue* tv = m_stack.topTV();
5847 assert(tv->m_type == KindOfClass);
5848 Class* cls = tv->m_data.pcls;
5849 assert(cls != nullptr);
5850 // Lookup the ctor
5851 const Func* f;
5852 LookupResult res UNUSED = lookupCtorMethod(f, cls, true);
5853 assert(res == MethodFoundWithThis);
5854 // Replace input with uninitialized instance.
5855 ObjectData* this_ = newInstance(cls);
5856 TRACE(2, "FPushCtor: just new'ed an instance of class %s: %p\n",
5857 cls->name()->data(), this_);
5858 this_->incRefCount();
5859 this_->incRefCount();
5860 tv->m_type = KindOfObject;
5861 tv->m_data.pobj = this_;
5862 // Push new activation record.
5863 ActRec* ar = m_stack.allocA();
5864 arSetSfp(ar, m_fp);
5865 ar->m_func = f;
5866 ar->setThis(this_);
5867 ar->initNumArgs(numArgs, true /* isFPushCtor */);
5868 arSetSfp(ar, m_fp);
5869 ar->setVarEnv(nullptr);
5872 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCtorD(PC& pc) {
5873 NEXT();
5874 DECODE_IVA(numArgs);
5875 DECODE(Id, id);
5876 const NamedEntityPair &nep =
5877 m_fp->m_func->unit()->lookupNamedEntityPairId(id);
5878 Class* cls = Unit::loadClass(nep.second, nep.first);
5879 if (cls == nullptr) {
5880 raise_error(Strings::UNKNOWN_CLASS,
5881 m_fp->m_func->unit()->lookupLitstrId(id)->data());
5883 // Lookup the ctor
5884 const Func* f;
5885 LookupResult res UNUSED = lookupCtorMethod(f, cls, true);
5886 assert(res == MethodFoundWithThis);
5887 // Push uninitialized instance.
5888 ObjectData* this_ = newInstance(cls);
5889 TRACE(2, "FPushCtorD: new'ed an instance of class %s: %p\n",
5890 cls->name()->data(), this_);
5891 this_->incRefCount();
5892 m_stack.pushObject(this_);
5893 // Push new activation record.
5894 ActRec* ar = m_stack.allocA();
5895 arSetSfp(ar, m_fp);
5896 ar->m_func = f;
5897 ar->setThis(this_);
5898 ar->initNumArgs(numArgs, true /* isFPushCtor */);
5899 ar->setVarEnv(nullptr);
5902 inline void OPTBLD_INLINE VMExecutionContext::iopDecodeCufIter(PC& pc) {
5903 PC origPc = pc;
5904 NEXT();
5905 DECODE_IA(itId);
5906 DECODE(Offset, offset);
5908 Iter* it = frame_iter(m_fp, itId);
5909 CufIter &cit = it->cuf();
5911 ObjectData* obj = nullptr;
5912 HPHP::Class* cls = nullptr;
5913 StringData* invName = nullptr;
5914 TypedValue *func = m_stack.topTV();
5916 ActRec* ar = m_fp;
5917 if (m_fp->m_func->isBuiltin()) {
5918 ar = getOuterVMFrame(ar);
5920 const Func* f = vm_decode_function(tvAsVariant(func),
5921 ar, false,
5922 obj, cls, invName,
5923 false);
5925 if (f == nullptr) {
5926 pc = origPc + offset;
5927 } else {
5928 cit.setFunc(f);
5929 if (obj) {
5930 cit.setCtx(obj);
5931 obj->incRefCount();
5932 } else {
5933 cit.setCtx(cls);
5935 cit.setName(invName);
5937 m_stack.popC();
5940 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufIter(PC& pc) {
5941 NEXT();
5942 DECODE_IVA(numArgs);
5943 DECODE_IA(itId);
5945 Iter* it = frame_iter(m_fp, itId);
5947 auto f = it->cuf().func();
5948 auto o = it->cuf().ctx();
5949 auto n = it->cuf().name();
5951 ActRec* ar = m_stack.allocA();
5952 arSetSfp(ar, m_fp);
5953 ar->m_func = f;
5954 ar->m_this = (ObjectData*)o;
5955 if (o && !(uintptr_t(o) & 1)) ar->m_this->incRefCount();
5956 if (n) {
5957 ar->setInvName(n);
5958 n->incRefCount();
5959 } else {
5960 ar->setVarEnv(nullptr);
5962 ar->initNumArgs(numArgs, false /* isFPushCtor */);
5965 inline void OPTBLD_INLINE VMExecutionContext::doFPushCuf(PC& pc,
5966 bool forward,
5967 bool safe) {
5968 NEXT();
5969 DECODE_IVA(numArgs);
5971 TypedValue func = m_stack.topTV()[safe];
5973 ObjectData* obj = nullptr;
5974 HPHP::Class* cls = nullptr;
5975 StringData* invName = nullptr;
5977 const Func* f = vm_decode_function(tvAsVariant(&func), getFP(),
5978 forward,
5979 obj, cls, invName,
5980 !safe);
5982 if (safe) m_stack.topTV()[1] = m_stack.topTV()[0];
5983 m_stack.ndiscard(1);
5984 if (f == nullptr) {
5985 f = SystemLib::s_nullFunc;
5986 if (safe) {
5987 m_stack.pushFalse();
5989 } else if (safe) {
5990 m_stack.pushTrue();
5993 ActRec* ar = m_stack.allocA();
5994 arSetSfp(ar, m_fp);
5995 ar->m_func = f;
5996 if (obj) {
5997 ar->setThis(obj);
5998 obj->incRefCount();
5999 } else if (cls) {
6000 ar->setClass(cls);
6001 } else {
6002 ar->setThis(nullptr);
6004 ar->initNumArgs(numArgs, false /* isFPushCtor */);
6005 if (invName) {
6006 ar->setInvName(invName);
6007 } else {
6008 ar->setVarEnv(nullptr);
6010 tvRefcountedDecRef(&func);
6013 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCuf(PC& pc) {
6014 doFPushCuf(pc, false, false);
6017 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufF(PC& pc) {
6018 doFPushCuf(pc, true, false);
6021 inline void OPTBLD_INLINE VMExecutionContext::iopFPushCufSafe(PC& pc) {
6022 doFPushCuf(pc, false, true);
6025 static inline ActRec* arFromInstr(TypedValue* sp, const Opcode* pc) {
6026 return arFromSpOffset((ActRec*)sp, instrSpToArDelta(pc));
6029 inline void OPTBLD_INLINE VMExecutionContext::iopFPassC(PC& pc) {
6030 #ifdef DEBUG
6031 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6032 #endif
6033 NEXT();
6034 DECODE_IVA(paramId);
6035 #ifdef DEBUG
6036 assert(paramId < ar->numArgs());
6037 #endif
6040 #define FPASSC_CHECKED_PRELUDE \
6041 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc); \
6042 NEXT(); \
6043 DECODE_IVA(paramId); \
6044 assert(paramId < ar->numArgs()); \
6045 const Func* func = ar->m_func;
6047 inline void OPTBLD_INLINE VMExecutionContext::iopFPassCW(PC& pc) {
6048 FPASSC_CHECKED_PRELUDE
6049 if (func->mustBeRef(paramId)) {
6050 TRACE(1, "FPassCW: function %s(%d) param %d is by reference, "
6051 "raising a strict warning (attr:0x%x)\n",
6052 func->name()->data(), func->numParams(), paramId,
6053 func->info() ? func->info()->attribute : 0);
6054 raise_strict_warning("Only variables should be passed by reference");
6058 inline void OPTBLD_INLINE VMExecutionContext::iopFPassCE(PC& pc) {
6059 FPASSC_CHECKED_PRELUDE
6060 if (func->mustBeRef(paramId)) {
6061 TRACE(1, "FPassCE: function %s(%d) param %d is by reference, "
6062 "throwing a fatal error (attr:0x%x)\n",
6063 func->name()->data(), func->numParams(), paramId,
6064 func->info() ? func->info()->attribute : 0);
6065 raise_error("Cannot pass parameter %d by reference", paramId+1);
6069 #undef FPASSC_CHECKED_PRELUDE
6071 inline void OPTBLD_INLINE VMExecutionContext::iopFPassV(PC& pc) {
6072 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6073 NEXT();
6074 DECODE_IVA(paramId);
6075 assert(paramId < ar->numArgs());
6076 const Func* func = ar->m_func;
6077 if (!func->byRef(paramId)) {
6078 m_stack.unbox();
6082 inline void OPTBLD_INLINE VMExecutionContext::iopFPassR(PC& pc) {
6083 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6084 NEXT();
6085 DECODE_IVA(paramId);
6086 assert(paramId < ar->numArgs());
6087 const Func* func = ar->m_func;
6088 if (func->byRef(paramId)) {
6089 TypedValue* tv = m_stack.topTV();
6090 if (tv->m_type != KindOfRef) {
6091 tvBox(tv);
6093 } else {
6094 if (m_stack.topTV()->m_type == KindOfRef) {
6095 m_stack.unbox();
6100 inline void OPTBLD_INLINE VMExecutionContext::iopFPassL(PC& pc) {
6101 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6102 NEXT();
6103 DECODE_IVA(paramId);
6104 DECODE_HA(local);
6105 assert(paramId < ar->numArgs());
6106 TypedValue* fr = frame_local(m_fp, local);
6107 TypedValue* to = m_stack.allocTV();
6108 if (!ar->m_func->byRef(paramId)) {
6109 cgetl_body(m_fp, fr, to, local);
6110 } else {
6111 vgetl_body(fr, to);
6115 inline void OPTBLD_INLINE VMExecutionContext::iopFPassN(PC& pc) {
6116 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6117 PC origPc = pc;
6118 NEXT();
6119 DECODE_IVA(paramId);
6120 assert(paramId < ar->numArgs());
6121 if (!ar->m_func->byRef(paramId)) {
6122 iopCGetN(origPc);
6123 } else {
6124 iopVGetN(origPc);
6128 inline void OPTBLD_INLINE VMExecutionContext::iopFPassG(PC& pc) {
6129 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6130 PC origPc = pc;
6131 NEXT();
6132 DECODE_IVA(paramId);
6133 assert(paramId < ar->numArgs());
6134 if (!ar->m_func->byRef(paramId)) {
6135 iopCGetG(origPc);
6136 } else {
6137 iopVGetG(origPc);
6141 inline void OPTBLD_INLINE VMExecutionContext::iopFPassS(PC& pc) {
6142 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6143 PC origPc = pc;
6144 NEXT();
6145 DECODE_IVA(paramId);
6146 assert(paramId < ar->numArgs());
6147 if (!ar->m_func->byRef(paramId)) {
6148 iopCGetS(origPc);
6149 } else {
6150 iopVGetS(origPc);
6154 void VMExecutionContext::iopFPassM(PC& pc) {
6155 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6156 NEXT();
6157 DECODE_IVA(paramId);
6158 assert(paramId < ar->numArgs());
6159 if (!ar->m_func->byRef(paramId)) {
6160 DECLARE_GETHELPER_ARGS
6161 getHelper(GETHELPER_ARGS);
6162 if (tvRet->m_type == KindOfRef) {
6163 tvUnbox(tvRet);
6165 } else {
6166 DECLARE_SETHELPER_ARGS
6167 TypedValue* tv1 = m_stack.allocTV();
6168 tvWriteUninit(tv1);
6169 if (!setHelperPre<false, true, false, true, 1,
6170 ConsumeAll>(MEMBERHELPERPRE_ARGS)) {
6171 if (base->m_type != KindOfRef) {
6172 tvBox(base);
6174 tvDupVar(base, tv1);
6175 } else {
6176 tvWriteNull(tv1);
6177 tvBox(tv1);
6179 setHelperPost<1>(SETHELPERPOST_ARGS);
6183 void VMExecutionContext::doFCall(ActRec* ar, PC& pc) {
6184 assert(getOuterVMFrame(ar) == m_fp);
6185 ar->m_savedRip = (uintptr_t)tx64->getRetFromInterpretedFrame();
6186 assert(isReturnHelper(ar->m_savedRip));
6187 TRACE(3, "FCall: pc %p func %p base %d\n", m_pc,
6188 m_fp->m_func->unit()->entry(),
6189 int(m_fp->m_func->base()));
6190 ar->m_soff = m_fp->m_func->unit()->offsetOf(pc)
6191 - (uintptr_t)m_fp->m_func->base();
6192 assert(pcOff() >= m_fp->m_func->base());
6193 prepareFuncEntry(ar, pc);
6194 SYNC();
6195 if (!EventHook::FunctionEnter(ar, EventHook::NormalFunc)) {
6196 pc = m_pc;
6200 inline void OPTBLD_INLINE VMExecutionContext::iopFCall(PC& pc) {
6201 ActRec* ar = arFromInstr(m_stack.top(), (Opcode*)pc);
6202 NEXT();
6203 DECODE_IVA(numArgs);
6204 assert(numArgs == ar->numArgs());
6205 checkStack(m_stack, ar->m_func);
6206 doFCall(ar, pc);
// Return a function pointer type for calling a builtin with a given
// return value and args.
template<class Ret, class... Args> struct NativeFunction {
  typedef Ret (*type)(Args...);
};
6215 // Recursively pack all parameters up to call a native builtin.
6216 template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller;
6217 template<class Ret, size_t NArgs, size_t CurArg> struct NativeFuncCaller {
6218 template<class... Args>
6219 static Ret call(const Func* func, TypedValue* tvs, Args... args) {
6220 typedef NativeFuncCaller<Ret,NArgs - 1,CurArg + 1> NextArgT;
6221 DataType type = func->params()[CurArg].builtinType();
6222 if (type == KindOfDouble) {
6223 // pass TV.m_data.dbl by value with C++ calling convention for doubles
6224 return NextArgT::call(func, tvs - 1, args..., tvs->m_data.dbl);
6226 if (type == KindOfInt64 || type == KindOfBoolean) {
6227 // pass TV.m_data.num by value
6228 return NextArgT::call(func, tvs - 1, args..., tvs->m_data.num);
6230 if (IS_STRING_TYPE(type) || type == KindOfArray || type == KindOfObject) {
6231 // pass ptr to TV.m_data for String&, Array&, or Object&
6232 return NextArgT::call(func, tvs - 1, args..., &tvs->m_data);
6234 // final case is for passing full value as Variant&
6235 return NextArgT::call(func, tvs - 1, args..., tvs);
6238 template<class Ret, size_t CurArg> struct NativeFuncCaller<Ret,0,CurArg> {
6239 template<class... Args>
6240 static Ret call(const Func* f, TypedValue*, Args... args) {
6241 typedef typename NativeFunction<Ret,Args...>::type FuncType;
6242 return reinterpret_cast<FuncType>(f->nativeFuncPtr())(args...);
6246 template<class Ret>
6247 static Ret makeNativeCall(const Func* f, TypedValue* args, size_t numArgs) {
6248 static_assert(kMaxBuiltinArgs == 5,
6249 "makeNativeCall needs updates for kMaxBuiltinArgs");
6250 switch (numArgs) {
6251 case 0: return NativeFuncCaller<Ret,0,0>::call(f, args);
6252 case 1: return NativeFuncCaller<Ret,1,0>::call(f, args);
6253 case 2: return NativeFuncCaller<Ret,2,0>::call(f, args);
6254 case 3: return NativeFuncCaller<Ret,3,0>::call(f, args);
6255 case 4: return NativeFuncCaller<Ret,4,0>::call(f, args);
6256 case 5: return NativeFuncCaller<Ret,5,0>::call(f, args);
6257 default: assert(false);
6259 not_reached();
6262 template<class Ret>
6263 static int makeNativeRefCall(const Func* f, Ret* ret,
6264 TypedValue* args, size_t numArgs) {
6265 switch (numArgs) {
6266 case 0: return NativeFuncCaller<int64_t,0,0>::call(f, args, ret);
6267 case 1: return NativeFuncCaller<int64_t,1,0>::call(f, args, ret);
6268 case 2: return NativeFuncCaller<int64_t,2,0>::call(f, args, ret);
6269 case 3: return NativeFuncCaller<int64_t,3,0>::call(f, args, ret);
6270 case 4: return NativeFuncCaller<int64_t,4,0>::call(f, args, ret);
6271 case 5: return NativeFuncCaller<int64_t,5,0>::call(f, args, ret);
6272 default: assert(false);
6274 not_reached();
6277 inline void OPTBLD_INLINE VMExecutionContext::iopFCallBuiltin(PC& pc) {
6278 NEXT();
6279 DECODE_IVA(numArgs);
6280 DECODE_IVA(numNonDefault);
6281 DECODE(Id, id);
6282 const NamedEntity* ne = m_fp->m_func->unit()->lookupNamedEntityId(id);
6283 Func* func = Unit::lookupFunc(ne);
6284 if (func == nullptr) {
6285 raise_error("Undefined function: %s",
6286 m_fp->m_func->unit()->lookupLitstrId(id)->data());
6288 TypedValue* args = m_stack.indTV(numArgs-1);
6289 assert(numArgs == func->numParams());
6290 for (int i = 0; i < numNonDefault; i++) {
6291 const Func::ParamInfo& pi = func->params()[i];
6293 #define CASE(kind) case KindOf ## kind : do { \
6294 tvCastTo ## kind ## InPlace(&args[-i]); break; \
6295 } while (0); break;
6297 switch (pi.builtinType()) {
6298 CASE(Boolean)
6299 CASE(Int64)
6300 CASE(Double)
6301 CASE(String)
6302 CASE(Array)
6303 CASE(Object)
6304 case KindOfUnknown:
6305 break;
6306 default:
6307 not_reached();
6310 #undef CASE
6312 TypedValue ret;
6313 ret.m_type = func->returnType();
6314 switch (func->returnType()) {
6315 case KindOfBoolean:
6316 ret.m_data.num = makeNativeCall<bool>(func, args, numArgs);
6317 break;
6318 case KindOfNull: /* void return type */
6319 case KindOfInt64:
6320 ret.m_data.num = makeNativeCall<int64_t>(func, args, numArgs);
6321 break;
6322 case KindOfString:
6323 case KindOfStaticString:
6324 case KindOfArray:
6325 case KindOfObject:
6326 makeNativeRefCall(func, &ret.m_data, args, numArgs);
6327 if (ret.m_data.num == 0) {
6328 ret.m_type = KindOfNull;
6330 break;
6331 case KindOfUnknown:
6332 makeNativeRefCall(func, &ret, args, numArgs);
6333 if (ret.m_type == KindOfUninit) {
6334 ret.m_type = KindOfNull;
6336 break;
6337 default:
6338 not_reached();
6341 frame_free_args(args, numNonDefault);
6342 m_stack.ndiscard(numArgs - 1);
6344 memcpy(m_stack.top(), &ret, sizeof(TypedValue));
6347 bool VMExecutionContext::prepareArrayArgs(ActRec* ar,
6348 ArrayData* args) {
6349 if (UNLIKELY(ar->hasInvName())) {
6350 m_stack.pushStringNoRc(ar->getInvName());
6351 m_stack.pushArray(args);
6352 ar->setVarEnv(0);
6353 ar->initNumArgs(2);
6354 return true;
6357 int nargs = args->size();
6358 const Func* f = ar->m_func;
6359 int nparams = f->numParams();
6360 int extra = nargs - nparams;
6361 if (extra < 0) {
6362 extra = 0;
6363 nparams = nargs;
6365 ssize_t pos = args->iter_begin();
6366 for (int i = 0; i < nparams; ++i) {
6367 TypedValue* from = const_cast<TypedValue*>(
6368 args->getValueRef(pos).asTypedValue());
6369 TypedValue* to = m_stack.allocTV();
6370 if (UNLIKELY(f->byRef(i))) {
6371 if (UNLIKELY(!tvAsVariant(from).isReferenced())) {
6372 raise_warning("Parameter %d to %s() expected to be a reference, "
6373 "value given", i + 1, f->fullName()->data());
6374 if (skipCufOnInvalidParams) {
6375 m_stack.discard();
6376 while (i--) m_stack.popTV();
6377 m_stack.popAR();
6378 m_stack.pushNull();
6379 return false;
6382 tvDup(from, to);
6383 } else {
6384 tvDup(from, to);
6385 if (UNLIKELY(to->m_type == KindOfRef)) {
6386 tvUnbox(to);
6389 pos = args->iter_advance(pos);
6391 if (extra && (ar->m_func->attrs() & AttrMayUseVV)) {
6392 ExtraArgs* extraArgs = ExtraArgs::allocateUninit(extra);
6393 for (int i = 0; i < extra; ++i) {
6394 TypedValue* to = extraArgs->getExtraArg(i);
6395 tvDup(args->getValueRef(pos).asTypedValue(), to);
6396 if (to->m_type == KindOfRef && to->m_data.pref->_count == 2) {
6397 tvUnbox(to);
6399 pos = args->iter_advance(pos);
6401 ar->setExtraArgs(extraArgs);
6402 ar->initNumArgs(nargs);
6403 } else {
6404 ar->initNumArgs(nparams);
6407 return true;
6410 static void cleanupParamsAndActRec(Stack& stack,
6411 ActRec* ar,
6412 ExtraArgs* extraArgs) {
6413 assert(stack.top() + (extraArgs ?
6414 ar->m_func->numParams() :
6415 ar->numArgs()) == (void*)ar);
6416 if (extraArgs) {
6417 const int numExtra = ar->numArgs() - ar->m_func->numParams();
6418 ExtraArgs::deallocate(extraArgs, numExtra);
6420 while (stack.top() != (void*)ar) {
6421 stack.popTV();
6423 stack.popAR();
// Implements the FCallArray opcode: the pre-live ActRec has exactly one
// "argument" on the stack -- the array of real arguments.  Unpacks that
// array into the callee frame and enters the function.  Returns false if
// the call was aborted (bad args, func-entry failure, or intercepted by
// the FunctionEnter event hook).
6426 bool VMExecutionContext::doFCallArray(PC& pc) {
6427   ActRec* ar = (ActRec*)(m_stack.top() + 1);
6428   assert(ar->numArgs() == 1);
6430   Cell* c1 = m_stack.topC();
6431   if (skipCufOnInvalidParams && UNLIKELY(c1->m_type != KindOfArray)) {
6432     // task #1756122
6433     // this is what we /should/ do, but our code base depends
6434     // on the broken behavior of casting the second arg to an
6435     // array.
6436     cleanupParamsAndActRec(m_stack, ar, nullptr);
6437     m_stack.pushNull();
6438     raise_warning("call_user_func_array() expects parameter 2 to be array");
6439     return false;
6442   const Func* func = ar->m_func;
     // Legacy behavior: coerce a non-array second arg to an array (see above).
6444   Array args(LIKELY(c1->m_type == KindOfArray) ? c1->m_data.parr :
6445              tvAsVariant(c1).toArray().get());
6446   m_stack.popTV();
6447   checkStack(m_stack, func);
6449   assert(ar->m_savedRbp == (uint64_t)m_fp);
6450   assert(!ar->m_func->isGenerator());
     // Returns from this frame land in the interpreter's return helper.
6451   ar->m_savedRip = (uintptr_t)tx64->getRetFromInterpretedFrame();
6452   assert(isReturnHelper(ar->m_savedRip));
6453   TRACE(3, "FCallArray: pc %p func %p base %d\n", m_pc,
6454         m_fp->m_func->unit()->entry(),
6455         int(m_fp->m_func->base()));
     // Record the caller's return offset relative to the caller func's base.
6456   ar->m_soff = m_fp->m_func->unit()->offsetOf(pc)
6457     - (uintptr_t)m_fp->m_func->base();
6458   assert(pcOff() > m_fp->m_func->base());
6460   if (UNLIKELY(!prepareArrayArgs(ar, args.get()))) return false;
6463   if (UNLIKELY(!(prepareFuncEntry(ar, pc)))) {
6464     return false;
6466   SYNC();
6467   if (UNLIKELY(!EventHook::FunctionEnter(ar, EventHook::NormalFunc))) {
     // Hook vetoed the call; resume at whatever pc the hook established.
6468     pc = m_pc;
6469     return false;
6471   return true;
// FCallArray opcode handler: thin wrapper; the bool result of doFCallArray
// is intentionally ignored here (failure already fixed up pc/stack).
6474 inline void OPTBLD_INLINE VMExecutionContext::iopFCallArray(PC& pc) {
6475   NEXT();
6476   (void)doFCallArray(pc);
// CufSafeArray: collapses the top two cells (status, result) into a
// two-element array [status, result] left on the stack.
6479 inline void OPTBLD_INLINE VMExecutionContext::iopCufSafeArray(PC& pc) {
6480   NEXT();
6481   Array ret;
6482   ret.append(tvAsVariant(m_stack.top() + 1));
     // appendWithRef preserves the result's ref-ness.
6483   ret.appendWithRef(tvAsVariant(m_stack.top() + 0));
6484   m_stack.popTV();
6485   m_stack.popTV();
6486   tvAsVariant(m_stack.top()) = ret;
// CufSafeReturn: given three stack cells (result at top+0, ok-flag at top+1,
// default at top+2), leaves the call result if ok was true, else the default.
// NOTE(review): raw cell copy at 6494 relies on the decrefs above having
// released exactly the cell being overwritten -- order is load-bearing.
6489 inline void OPTBLD_INLINE VMExecutionContext::iopCufSafeReturn(PC& pc) {
6490   NEXT();
6491   bool ok = tvAsVariant(m_stack.top() + 1).toBoolean();
6492   tvRefcountedDecRef(m_stack.top() + 1);
6493   tvRefcountedDecRef(m_stack.top() + (ok ? 2 : 0));
6494   if (ok) m_stack.top()[2] = m_stack.top()[0];
6495   m_stack.ndiscard(2);
// Shared helper for the IterInit* opcodes: initializes the iterator from the
// cell on the stack; if the collection is empty, jumps past the loop via
// ITER_SKIP.  Always pops the input cell.  Returns whether iteration begins.
6498 inline bool VMExecutionContext::initIterator(PC& pc, PC& origPc, Iter* it,
6499                                              Offset offset, Cell* c1) {
6500   bool hasElems = it->init(c1);
6501   if (!hasElems) {
6502     ITER_SKIP(offset);
6504   m_stack.popC();
6505   return hasElems;
// IterInit <iter> <offset> <local>: start iterating the cell on top of the
// stack; on success store the first value into the given local.
6508 inline void OPTBLD_INLINE VMExecutionContext::iopIterInit(PC& pc) {
6509   PC origPc = pc;
6510   NEXT();
6511   DECODE_IA(itId);
6512   DECODE(Offset, offset);
6513   DECODE_HA(val);
6514   Cell* c1 = m_stack.topC();
6515   Iter* it = frame_iter(m_fp, itId);
6516   TypedValue* tv1 = frame_local(m_fp, val);
6517   if (initIterator(pc, origPc, it, offset, c1)) {
6518     tvAsVariant(tv1) = it->arr().second();
// IterInitK: like IterInit, but also stores the first key into a second local.
6522 inline void OPTBLD_INLINE VMExecutionContext::iopIterInitK(PC& pc) {
6523   PC origPc = pc;
6524   NEXT();
6525   DECODE_IA(itId);
6526   DECODE(Offset, offset);
6527   DECODE_HA(val);
6528   DECODE_HA(key);
6529   Cell* c1 = m_stack.topC();
6530   Iter* it = frame_iter(m_fp, itId);
6531   TypedValue* tv1 = frame_local(m_fp, val);
6532   TypedValue* tv2 = frame_local(m_fp, key);
6533   if (initIterator(pc, origPc, it, offset, c1)) {
6534     tvAsVariant(tv1) = it->arr().second();
6535     tvAsVariant(tv2) = it->arr().first();
// WIterInit: IterInit variant that binds the value "with ref" semantics
// (withRefBind preserves references in the source collection).
6539 inline void OPTBLD_INLINE VMExecutionContext::iopWIterInit(PC& pc) {
6540   PC origPc = pc;
6541   NEXT();
6542   DECODE_IA(itId);
6543   DECODE(Offset, offset);
6544   DECODE_HA(val);
6545   Cell* c1 = m_stack.topC();
6546   Iter* it = frame_iter(m_fp, itId);
6547   TypedValue* tv1 = frame_local(m_fp, val);
6548   if (initIterator(pc, origPc, it, offset, c1)) {
6549     tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
// WIterInitK: WIterInit plus a key local (key is copied by value).
6553 inline void OPTBLD_INLINE VMExecutionContext::iopWIterInitK(PC& pc) {
6554   PC origPc = pc;
6555   NEXT();
6556   DECODE_IA(itId);
6557   DECODE(Offset, offset);
6558   DECODE_HA(val);
6559   DECODE_HA(key);
6560   Cell* c1 = m_stack.topC();
6561   Iter* it = frame_iter(m_fp, itId);
6562   TypedValue* tv1 = frame_local(m_fp, val);
6563   TypedValue* tv2 = frame_local(m_fp, key);
6564   if (initIterator(pc, origPc, it, offset, c1)) {
6565     tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6566     tvAsVariant(tv2) = it->arr().first();
// Shared helper for the MIterInit* opcodes (by-reference / mutable
// iteration).  Takes a Ref (Var) from the stack instead of a Cell; pops it
// with popV.  Jumps past the loop when the collection is empty.
6570 inline bool VMExecutionContext::initIteratorM(PC& pc, PC& origPc, Iter* it,
6571                                               Offset offset, Var* v1) {
6572   bool hasElems = it->minit(v1);
6573   if (!hasElems) {
6574     ITER_SKIP(offset);
6576   m_stack.popV();
6577   return hasElems;
// MIterInit: start a mutable (by-ref) iteration; binds the value local to
// the element by reference via assignRef.
6580 inline void OPTBLD_INLINE VMExecutionContext::iopMIterInit(PC& pc) {
6581   PC origPc = pc;
6582   NEXT();
6583   DECODE_IA(itId);
6584   DECODE(Offset, offset);
6585   DECODE_HA(val);
6586   Var* v1 = m_stack.topV();
6587   assert(v1->m_type == KindOfRef);
6588   Iter* it = frame_iter(m_fp, itId);
6589   TypedValue* tv1 = frame_local(m_fp, val);
6590   if (initIteratorM(pc, origPc, it, offset, v1)) {
6591     tvAsVariant(tv1).assignRef(it->marr().val());
// MIterInitK: MIterInit plus a key local (key copied by value; only the
// value is bound by reference).
6595 inline void OPTBLD_INLINE VMExecutionContext::iopMIterInitK(PC& pc) {
6596   PC origPc = pc;
6597   NEXT();
6598   DECODE_IA(itId);
6599   DECODE(Offset, offset);
6600   DECODE_HA(val);
6601   DECODE_HA(key);
6602   Var* v1 = m_stack.topV();
6603   assert(v1->m_type == KindOfRef);
6604   Iter* it = frame_iter(m_fp, itId);
6605   TypedValue* tv1 = frame_local(m_fp, val);
6606   TypedValue* tv2 = frame_local(m_fp, key);
6607   if (initIteratorM(pc, origPc, it, offset, v1)) {
6608     tvAsVariant(tv1).assignRef(it->marr().val());
6609     tvAsVariant(tv2) = it->marr().key();
// IterNext <iter> <offset> <local>: advance; if more elements remain, jump
// back to the loop body (ITER_SKIP) and refresh the value local.  Falling
// through (no jump) means iteration is finished.
6613 inline void OPTBLD_INLINE VMExecutionContext::iopIterNext(PC& pc) {
6614   PC origPc = pc;
6615   NEXT();
6616   DECODE_IA(itId);
6617   DECODE(Offset, offset);
6618   DECODE_HA(val);
6619   Iter* it = frame_iter(m_fp, itId);
6620   TypedValue* tv1 = frame_local(m_fp, val);
6621   if (it->next()) {
6622     ITER_SKIP(offset);
6623     tvAsVariant(tv1) = it->arr().second();
// IterNextK: IterNext plus refreshing the key local.
6627 inline void OPTBLD_INLINE VMExecutionContext::iopIterNextK(PC& pc) {
6628   PC origPc = pc;
6629   NEXT();
6630   DECODE_IA(itId);
6631   DECODE(Offset, offset);
6632   DECODE_HA(val);
6633   DECODE_HA(key);
6634   Iter* it = frame_iter(m_fp, itId);
6635   TypedValue* tv1 = frame_local(m_fp, val);
6636   TypedValue* tv2 = frame_local(m_fp, key);
6637   if (it->next()) {
6638     ITER_SKIP(offset);
6639     tvAsVariant(tv1) = it->arr().second();
6640     tvAsVariant(tv2) = it->arr().first();
// WIterNext: IterNext with ref-preserving value binding (withRefBind).
6644 inline void OPTBLD_INLINE VMExecutionContext::iopWIterNext(PC& pc) {
6645   PC origPc = pc;
6646   NEXT();
6647   DECODE_IA(itId);
6648   DECODE(Offset, offset);
6649   DECODE_HA(val);
6650   Iter* it = frame_iter(m_fp, itId);
6651   TypedValue* tv1 = frame_local(m_fp, val);
6652   if (it->next()) {
6653     ITER_SKIP(offset);
6654     tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
// WIterNextK: WIterNext plus refreshing the key local.
6658 inline void OPTBLD_INLINE VMExecutionContext::iopWIterNextK(PC& pc) {
6659   PC origPc = pc;
6660   NEXT();
6661   DECODE_IA(itId);
6662   DECODE(Offset, offset);
6663   DECODE_HA(val);
6664   DECODE_HA(key);
6665   Iter* it = frame_iter(m_fp, itId);
6666   TypedValue* tv1 = frame_local(m_fp, val);
6667   TypedValue* tv2 = frame_local(m_fp, key);
6668   if (it->next()) {
6669     ITER_SKIP(offset);
6670     tvAsVariant(tv1) = withRefBind(it->arr().secondRef());
6671     tvAsVariant(tv2) = it->arr().first();
// MIterNext: advance a mutable iterator; rebind the value local by ref.
6675 inline void OPTBLD_INLINE VMExecutionContext::iopMIterNext(PC& pc) {
6676   PC origPc = pc;
6677   NEXT();
6678   DECODE_IA(itId);
6679   DECODE(Offset, offset);
6680   DECODE_HA(val);
6681   Iter* it = frame_iter(m_fp, itId);
6682   TypedValue* tv1 = frame_local(m_fp, val);
6683   if (it->mnext()) {
6684     ITER_SKIP(offset);
6685     tvAsVariant(tv1).assignRef(it->marr().val());
// MIterNextK: MIterNext plus refreshing the key local (key by value).
6689 inline void OPTBLD_INLINE VMExecutionContext::iopMIterNextK(PC& pc) {
6690   PC origPc = pc;
6691   NEXT();
6692   DECODE_IA(itId);
6693   DECODE(Offset, offset);
6694   DECODE_HA(val);
6695   DECODE_HA(key);
6696   Iter* it = frame_iter(m_fp, itId);
6697   TypedValue* tv1 = frame_local(m_fp, val);
6698   TypedValue* tv2 = frame_local(m_fp, key);
6699   if (it->mnext()) {
6700     ITER_SKIP(offset);
6701     tvAsVariant(tv1).assignRef(it->marr().val());
6702     tvAsVariant(tv2) = it->marr().key();
// IterFree: release the resources held by a (value) iterator slot.
6706 inline void OPTBLD_INLINE VMExecutionContext::iopIterFree(PC& pc) {
6707   NEXT();
6708   DECODE_IA(itId);
6709   Iter* it = frame_iter(m_fp, itId);
6710   it->free();
// MIterFree: release a mutable-iterator slot.
6713 inline void OPTBLD_INLINE VMExecutionContext::iopMIterFree(PC& pc) {
6714   NEXT();
6715   DECODE_IA(itId);
6716   Iter* it = frame_iter(m_fp, itId);
6717   it->mfree();
// CIterFree: release a continuation-iterator slot.
6720 inline void OPTBLD_INLINE VMExecutionContext::iopCIterFree(PC& pc) {
6721   NEXT();
6722   DECODE_IA(itId);
6723   Iter* it = frame_iter(m_fp, itId);
6724   it->cfree();
// Common implementation for all include/require opcodes.  Resolves the path
// on top of the stack into a compilation Unit (respecting DocRoot/Relative
// flags), then either evaluates the unit's pseudomain or -- for *_once when
// already included -- just pushes true.  Missing files raise a warning, or a
// fatal error when InclOpFatal (require semantics) is set.
6727 inline void OPTBLD_INLINE inclOp(VMExecutionContext *ec, PC &pc,
6728                                  InclOpFlags flags) {
6729   NEXT();
6730   Cell* c1 = ec->m_stack.topC();
6731   String path(prepareKey(c1));
6732   bool initial;
6733   TRACE(2, "inclOp %s %s %s %s %s \"%s\"\n",
6734         flags & InclOpOnce ? "Once" : "",
6735         flags & InclOpDocRoot ? "DocRoot" : "",
6736         flags & InclOpRelative ? "Relative" : "",
6737         flags & InclOpLocal ? "Local" : "",
6738         flags & InclOpFatal ? "Fatal" : "",
6739         path->data());
6741   Unit* u = flags & (InclOpDocRoot|InclOpRelative) ?
6742     ec->evalIncludeRoot(path.get(), flags, &initial) :
6743     ec->evalInclude(path.get(), ec->m_fp->m_func->unit()->filepath(), &initial);
6744   ec->m_stack.popC();
6745   if (u == nullptr) {
     // Select raise_error vs raise_warning by flag; same printf-style call.
6746     ((flags & InclOpFatal) ?
6747      (void (*)(const char *, ...))raise_error :
6748      (void (*)(const char *, ...))raise_warning)("File not found: %s",
6749                                                  path->data());
6750     ec->m_stack.pushFalse();
6751   } else {
6752     if (!(flags & InclOpOnce) || initial) {
6753       ec->evalUnit(u, (flags & InclOpLocal), pc, EventHook::PseudoMain);
6754     } else {
       // Already included and "once" requested: skip evaluation, return true.
6755       Stats::inc(Stats::PseudoMain_Guarded);
6756       ec->m_stack.pushTrue();
// Thin opcode wrappers: each include/require variant is inclOp with the
// matching flag combination.
6761 inline void OPTBLD_INLINE VMExecutionContext::iopIncl(PC& pc) {
6762   inclOp(this, pc, InclOpDefault);
6765 inline void OPTBLD_INLINE VMExecutionContext::iopInclOnce(PC& pc) {
6766   inclOp(this, pc, InclOpOnce);
6769 inline void OPTBLD_INLINE VMExecutionContext::iopReq(PC& pc) {
6770   inclOp(this, pc, InclOpFatal);
6773 inline void OPTBLD_INLINE VMExecutionContext::iopReqOnce(PC& pc) {
6774   inclOp(this, pc, InclOpFatal | InclOpOnce);
6777 inline void OPTBLD_INLINE VMExecutionContext::iopReqDoc(PC& pc) {
6778   inclOp(this, pc, InclOpFatal | InclOpOnce | InclOpDocRoot);
// Eval opcode: compiles the string on top of the stack (prefixed with
// "<?php " so it parses as PHP code) and evaluates the resulting unit.
6781 inline void OPTBLD_INLINE VMExecutionContext::iopEval(PC& pc) {
6782   NEXT();
6783   Cell* c1 = m_stack.topC();
6784   String code(prepareKey(c1));
6785   String prefixedCode = concat("<?php ", code);
6786   Unit* unit = compileEvalString(prefixedCode.get());
6787   if (unit == nullptr) {
6788     raise_error("Syntax error in eval()");
6790   m_stack.popC();
6791   evalUnit(unit, false, pc, EventHook::Eval);
// DefFunc: make the function with the given id visible (bind it into the
// per-request function cache).
6794 inline void OPTBLD_INLINE VMExecutionContext::iopDefFunc(PC& pc) {
6795   NEXT();
6796   DECODE_IVA(fid);
6797   Func* f = m_fp->m_func->unit()->lookupFuncId(fid);
6798   f->setCached();
// DefCls: define the PreClass with the given id as a runtime Class.
6801 inline void OPTBLD_INLINE VMExecutionContext::iopDefCls(PC& pc) {
6802   NEXT();
6803   DECODE_IVA(cid);
6804   PreClass* c = m_fp->m_func->unit()->lookupPreClassId(cid);
6805   Unit::defClass(c);
// DefTypedef: define the typedef/alias with the given id for this request.
6808 inline void OPTBLD_INLINE VMExecutionContext::iopDefTypedef(PC& pc) {
6809   NEXT();
6810   DECODE_IVA(tid);
6811   m_fp->m_func->unit()->defTypedef(tid);
// Raises a fatal error if the current frame has no $this object.
6814 static inline void checkThis(ActRec* fp) {
6815   if (!fp->hasThis()) {
6816     raise_error(Strings::FATAL_NULL_THIS);
// This opcode: push $this; fatal if there is none.
6820 inline void OPTBLD_INLINE VMExecutionContext::iopThis(PC& pc) {
6821   NEXT();
6822   checkThis(m_fp);
6823   ObjectData* this_ = m_fp->getThis();
6824   m_stack.pushObject(this_);
// BareThis <notice>: push $this, or null when absent; the immediate decides
// whether a null-$this notice is raised.
6827 inline void OPTBLD_INLINE VMExecutionContext::iopBareThis(PC& pc) {
6828   NEXT();
6829   DECODE(unsigned char, notice);
6830   if (m_fp->hasThis()) {
6831     ObjectData* this_ = m_fp->getThis();
6832     m_stack.pushObject(this_);
6833   } else {
6834     m_stack.pushNull();
6835     if (notice) raise_notice(Strings::WARN_NULL_THIS);
// CheckThis: fatal if the frame has no $this; pushes nothing.
6839 inline void OPTBLD_INLINE VMExecutionContext::iopCheckThis(PC& pc) {
6840   NEXT();
6841   checkThis(m_fp);
// InitThisLoc <local>: seed the given local with $this (increfed) or leave
// it uninit when the frame has no $this.  The old local value is released
// first.
6844 inline void OPTBLD_INLINE VMExecutionContext::iopInitThisLoc(PC& pc) {
6845   NEXT();
6846   DECODE_IVA(id);
6847   TypedValue* thisLoc = frame_local(m_fp, id);
6848   tvRefcountedDecRef(thisLoc);
6849   if (m_fp->hasThis()) {
6850     thisLoc->m_data.pobj = m_fp->getThis();
6851     thisLoc->m_type = KindOfObject;
6852     tvIncRef(thisLoc);
6853   } else {
6854     tvWriteUninit(thisLoc);
6859  * Helper for StaticLoc and StaticLocInit.
6861 static inline void
// Looks up (and lazily creates, as uninit) the named static-local slot in
// this function's static-locals map.  On return, `val` points into the map
// and `inited` says whether the slot already existed.
6862 lookupStatic(StringData* name,
6863              const ActRec* fp,
6864              TypedValue*&val, bool& inited) {
6865   HphpArray* map = get_static_locals(fp);
6866   assert(map != nullptr);
6867   val = map->nvGet(name);
6868   if (val == nullptr) {
     // First use: insert an uninit cell, then re-fetch its address in the map.
6869     TypedValue tv;
6870     tvWriteUninit(&tv);
6871     map->nvSet(name, &tv, false);
6872     val = map->nvGet(name);
6873     inited = false;
6874   } else {
6875     inited = true;
// StaticLoc <local> <name>: bind the local by reference to the function's
// static variable slot; push whether the static was already initialized.
6879 inline void OPTBLD_INLINE VMExecutionContext::iopStaticLoc(PC& pc) {
6880   NEXT();
6881   DECODE_IVA(localId);
6882   DECODE_LITSTR(var);
6883   TypedValue* fr = nullptr;
6884   bool inited;
6885   lookupStatic(var, m_fp, fr, inited);
6886   assert(fr != nullptr);
6887   if (fr->m_type != KindOfRef) {
     // A fresh slot must be boxed so the local can alias it.
6888     assert(!inited);
6889     tvBox(fr);
6891   TypedValue* tvLocal = frame_local(m_fp, localId);
6892   tvBind(fr, tvLocal);
6893   if (inited) {
6894     m_stack.pushTrue();
6895   } else {
6896     m_stack.pushFalse();
// StaticLocInit <local> <name>: like StaticLoc, but on first use initialize
// the static slot from the value on top of the stack (which is consumed).
6900 inline void OPTBLD_INLINE VMExecutionContext::iopStaticLocInit(PC& pc) {
6901   NEXT();
6902   DECODE_IVA(localId);
6903   DECODE_LITSTR(var);
6904   TypedValue* fr = nullptr;
6905   bool inited;
6906   lookupStatic(var, m_fp, fr, inited);
6907   assert(fr != nullptr);
6908   if (!inited) {
6909     Cell* initVal = m_stack.topC();
6910     tvDup(initVal, fr);
6912   if (fr->m_type != KindOfRef) {
6913     assert(!inited);
6914     tvBox(fr);
6916   TypedValue* tvLocal = frame_local(m_fp, localId);
6917   tvBind(fr, tvLocal);
     // The initializer cell was duplicated into the slot; just drop it.
6918   m_stack.discard();
// Catch: pop the most recent fault (must be a user exception) and push the
// exception object.  pushObjectNoRc: the fault already holds the refcount.
6921 inline void OPTBLD_INLINE VMExecutionContext::iopCatch(PC& pc) {
6922   NEXT();
6923   assert(m_faults.size() > 0);
6924   Fault fault = m_faults.back();
6925   m_faults.pop_back();
6926   assert(fault.m_faultType == Fault::UserException);
6927   m_stack.pushObjectNoRc(fault.m_userException);
// LateBoundCls: push the late-static-bound class of the current frame;
// fatal when there is no class context.
6930 inline void OPTBLD_INLINE VMExecutionContext::iopLateBoundCls(PC& pc) {
6931   NEXT();
6932   Class* cls = frameStaticClass(m_fp);
6933   if (!cls) {
6934     raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
6936   m_stack.pushClass(cls);
// VerifyParamType <param>: enforce the declared type constraint on the
// given parameter's local.  SYNC runs before NEXT so a thrown error reports
// the correct pc.
6939 inline void OPTBLD_INLINE VMExecutionContext::iopVerifyParamType(PC& pc) {
6940   SYNC(); // We might need m_pc to be updated to throw.
6941   NEXT();
6943   DECODE_IVA(param);
6944   const Func *func = m_fp->m_func;
6945   assert(param < func->numParams());
6946   assert(func->numParams() == int(func->params().size()));
6947   const TypeConstraint& tc = func->params()[param].typeConstraint();
6948   assert(tc.exists());
6949   const TypedValue *tv = frame_local(m_fp, param);
6950   tc.verify(tv, func, param);
// NativeImpl: invoke the builtin C++ implementation for the current
// function's frame, then pop the frame and return to the caller (or halt
// the interpreter by setting pc = 0 when this was the outermost frame).
6953 inline void OPTBLD_INLINE VMExecutionContext::iopNativeImpl(PC& pc) {
6954   NEXT();
6955   uint soff = m_fp->m_soff;
6956   BuiltinFunction func = m_fp->m_func->builtinFuncPtr();
6957   assert(func);
6958   // Actually call the native implementation. This will handle freeing the
6959   // locals in the normal case. In the case of an exception, the VM unwinder
6960   // will take care of it.
6961   func(m_fp);
6962   // Adjust the stack; the native implementation put the return value in the
6963   // right place for us already
6964   m_stack.ndiscard(m_fp->m_func->numSlotsInFrame());
6965   ActRec* sfp = m_fp->arGetSfp();
6966   if (LIKELY(sfp != m_fp)) {
6967     // Restore caller's execution state.
6968     m_fp = sfp;
6969     pc = m_fp->m_func->unit()->entry() + m_fp->m_func->base() + soff;
6970     m_stack.ret();
6971   } else {
6972     // No caller; terminate.
6973     m_stack.ret();
6974 #ifdef HPHP_TRACE
6976     std::ostringstream os;
6977     m_stack.toStringElm(os, m_stack.topTV(), m_fp);
6978     ONTRACE(1,
6979             Trace::trace("Return %s from VMExecutionContext::dispatch("
6980                          "%p)\n", os.str().c_str(), m_fp));
6982 #endif
     // pc == 0 signals the dispatch loop to stop.
6983     pc = 0;
// HighInvalid: executing this opcode is always a bug -- abort immediately.
6987 inline void OPTBLD_INLINE VMExecutionContext::iopHighInvalid(PC& pc) {
6988   fprintf(stderr, "invalid bytecode executed\n");
6989   abort();
// Self: push the current class context; fatal outside a class.
6992 inline void OPTBLD_INLINE VMExecutionContext::iopSelf(PC& pc) {
6993   NEXT();
6994   Class* clss = arGetContextClass(m_fp);
6995   if (!clss) {
6996     raise_error(HPHP::Strings::CANT_ACCESS_SELF);
6998   m_stack.pushClass(clss);
// Parent: push the parent of the current class context; fatal when there is
// no class context or the class has no parent.
7001 inline void OPTBLD_INLINE VMExecutionContext::iopParent(PC& pc) {
7002   NEXT();
7003   Class* clss = arGetContextClass(m_fp);
7004   if (!clss) {
7005     raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
7007   Class* parent = clss->parent();
7008   if (!parent) {
7009     raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
7011   m_stack.pushClass(parent);
// CreateCl <numArgs> <class>: instantiate a closure object, capturing
// <numArgs> use-vars from the top of the stack, then push the closure.
7014 inline void OPTBLD_INLINE VMExecutionContext::iopCreateCl(PC& pc) {
7015   NEXT();
7016   DECODE_IVA(numArgs);
7017   DECODE_LITSTR(clsName);
7018   Class* cls = Unit::loadClass(clsName);
7019   c_Closure* cl = static_cast<c_Closure*>(newInstance(cls));
7020   c_Closure* cl2 = cl->init(numArgs, m_fp, m_stack.top());
     // init() consumed the captured cells; drop them from the stack.
7021   m_stack.ndiscard(numArgs);
7022   assert(cl == cl2);
7023   m_stack.pushObject(cl2);
// Allocates and initializes a c_Continuation object for a generator, and
// sets up the embedded ActRec that will serve as the generator body's frame.
// For methods, the frame carries $this (increfed) or the static class; the
// continuation object itself is stored (un-increfed, see comment below) as
// local 0 of that frame.
7026 template<bool isMethod>
7027 c_Continuation*
7028 VMExecutionContext::createContinuationHelper(const Func* origFunc,
7029                                              const Func* genFunc,
7030                                              ObjectData* thisPtr,
7031                                              ArrayData* args,
7032                                              Class* frameStaticCls) {
7033   auto const cont = c_Continuation::alloc(
7034     SystemLib::s_ContinuationClass,
7035     genFunc->numLocals(),
7036     genFunc->numIterators()
7038   cont->incRefCount();
7039   cont->setNoDestruct();
7040   cont->init(origFunc, thisPtr, args);
7042   // The ActRec corresponding to the generator body lives as long as the object
7043   // does. We set it up once, here, and then just change FP to point to it when
7044   // we enter the generator body.
7045   ActRec* ar = cont->actRec();
7047   if (isMethod) {
7048     if (origFunc->isClosureBody()) {
       // Rebind the generator body's class to the closure's context class.
7049       genFunc = genFunc->cloneAndSetClass(origFunc->cls());
7052     if (thisPtr) {
7053       ar->setThis(thisPtr);
7054       thisPtr->incRefCount();
7055     } else {
7056       ar->setClass(frameStaticCls);
7058   } else {
7059     ar->setThis(nullptr);
7062   ar->m_func = genFunc;
7063   ar->initNumArgs(1);
7064   ar->setVarEnv(nullptr);
7066   TypedValue* contLocal = frame_local(ar, 0);
7067   contLocal->m_type = KindOfObject;
7068   contLocal->m_data.pobj = cont;
7069   // Do not incref the continuation here! Doing so will create a reference
7070   // cycle, since this reference is a local in the continuation frame and thus
7071   // will be decreffed when the continuation is destroyed. The corresponding
7072   // non-decref is in ~c_Continuation.
7074   return cont;
// Entry point for CreateCont: gathers $this and (optionally) the current
// frame's arguments, then delegates to createContinuationHelper.
7077 template<bool isMethod>
7078 c_Continuation*
7079 VMExecutionContext::createContinuation(ActRec* fp,
7080                                        bool getArgs,
7081                                        const Func* origFunc,
7082                                        const Func* genFunc) {
7083   ObjectData* const thisPtr = fp->hasThis() ? fp->getThis() : nullptr;
7085   Array args;
7086   if (getArgs) {
7087     args = hhvm_get_frame_args(fp);
7090   return createContinuationHelper<isMethod>(
7091     origFunc,
7092     genFunc,
7093     thisPtr,
7094     args.get(),
7095     frameStaticClass(fp)
// Copies one captured variable into the continuation's frame: into a named
// local when the generator body declares one with this name, otherwise into
// the frame's (lazily created) VarEnv.
7099 static inline void setContVar(const Func* genFunc,
7100                               const StringData* name,
7101                               TypedValue* src,
7102                               c_Continuation* cont) {
7103   Id destId = genFunc->lookupVarId(name);
7104   if (destId != kInvalidId) {
7105     tvDup(src, frame_local(cont->actRec(), destId));
7106   } else {
7107     ActRec *contFP = cont->actRec();
7108     if (!contFP->hasVarEnv()) {
7109       // We pass skipInsert to this VarEnv because it's going to exist
7110       // independent of the chain; i.e. we can't stack-allocate it. We link it
7111       // into the chain in UnpackCont, and take it out in PackCont.
7112       contFP->setVarEnv(VarEnv::createLazyAttach(contFP, true));
7114     contFP->getVarEnv()->setWithRef(name, src);
// Interned "this" string, shared by the continuation-variable code below.
7118 static const StaticString s_this("this");
// Copies the creating frame's variables into the continuation frame, then
// (if the body uses $this as a local and it wasn't among the copied vars)
// prefills that local with the bound object.
7120 c_Continuation*
7121 VMExecutionContext::fillContinuationVars(ActRec* fp,
7122                                          const Func* origFunc,
7123                                          const Func* genFunc,
7124                                          c_Continuation* cont) {
7125   // For functions that contain only named locals, the variable
7126   // environment is saved and restored by teleporting the values (and
7127   // their references) between the evaluation stack and the local
7128   // space at the end of the object using memcpy. Any variables in a
7129   // VarEnv are saved and restored from m_vars as usual.
7130   static const StringData* thisStr = s_this.get();
7131   int nLocals = genFunc->numLocals();
7132   bool skipThis;
7133   if (fp->hasVarEnv()) {
     // Slow path: enumerate the VarEnv's defined variables by name.
7134     Stats::inc(Stats::Cont_CreateVerySlow);
7135     Array definedVariables = fp->getVarEnv()->getDefinedVariables();
7136     skipThis = definedVariables.exists(s_this, true);
7138     for (ArrayIter iter(definedVariables); !iter.end(); iter.next()) {
7139       setContVar(genFunc, iter.first().getStringData(),
7140                  const_cast<TypedValue*>(iter.secondRef().asTypedValue()), cont);
7142   } else {
     // Fast path: walk the creating function's named locals directly.
7143     skipThis = origFunc->lookupVarId(thisStr) != kInvalidId;
7144     for (Id i = 0; i < origFunc->numNamedLocals(); ++i) {
7145       setContVar(genFunc, origFunc->localVarName(i),
7146                  frame_local(fp, i), cont);
7150   // If $this is used as a local inside the body and is not provided
7151   // by our containing environment, just prefill it here instead of
7152   // using InitThisLoc inside the body
7153   if (!skipThis && cont->m_obj.get()) {
7154     Id id = genFunc->lookupVarId(thisStr);
7155     if (id != kInvalidId) {
       // NOTE(review): locals appear to be laid out in reverse order in the
       // continuation's storage (index nLocals - id - 1) -- confirm against
       // c_Continuation::locals().
7156       tvAsVariant(&cont->locals()[nLocals - id - 1]) = cont->m_obj;
7159   return cont;
7162 // Explicitly instantiate for hhbctranslator.o and codegen.o
// Both <true> (method) and <false> (free function) variants are referenced
// from other translation units, so force their instantiation here.
7163 template c_Continuation* VMExecutionContext::createContinuation<true>(
7164   ActRec*, bool, const Func*, const Func*);
7165 template c_Continuation* VMExecutionContext::createContinuation<false>(
7166   ActRec*, bool, const Func*, const Func*);
7167 template c_Continuation* VMExecutionContext::createContinuationHelper<true>(
7168   const Func*, const Func*, ObjectData*, ArrayData*, Class*);
7169 template c_Continuation* VMExecutionContext::createContinuationHelper<false>(
7170   const Func*, const Func*, ObjectData*, ArrayData*, Class*);
// CreateCont <getArgs> <genName>: build a continuation object for this
// function's generator body, populate its variables, and push it.
7172 inline void OPTBLD_INLINE VMExecutionContext::iopCreateCont(PC& pc) {
7173   NEXT();
7174   DECODE_IVA(getArgs);
7175   DECODE_LITSTR(genName);
7177   const Func* origFunc = m_fp->m_func;
7178   const Func* genFunc = origFunc->getGeneratorBody(genName);
7179   assert(genFunc != nullptr);
7181   bool isMethod = origFunc->isMethod();
7182   c_Continuation* cont = isMethod ?
7183     createContinuation<true>(m_fp, getArgs, origFunc, genFunc) :
7184     createContinuation<false>(m_fp, getArgs, origFunc, genFunc);
7186   fillContinuationVars(m_fp, origFunc, genFunc, cont);
7188   TypedValue* ret = m_stack.allocTV();
7189   ret->m_type = KindOfObject;
7190   ret->m_data.pobj = cont;
// Fetches the continuation object stored as local 0 of a generator frame
// (the slot written by createContinuationHelper).
7193 static inline c_Continuation* frame_continuation(ActRec* fp) {
7194   ObjectData* obj = frame_local(fp, 0)->m_data.pobj;
7195   assert(dynamic_cast<c_Continuation*>(obj));
7196   return static_cast<c_Continuation*>(obj);
// Fetches the continuation from $this of a Continuation-method frame.
7199 static inline c_Continuation* this_continuation(ActRec* fp) {
7200   ObjectData* obj = fp->getThis();
7201   assert(dynamic_cast<c_Continuation*>(obj));
7202   return static_cast<c_Continuation*>(obj);
// ContEnter: transfer control into the generator body by re-linking its
// persistent ActRec under the current frame and jumping to its entry.
7205 void VMExecutionContext::iopContEnter(PC& pc) {
7206   NEXT();
7208   // The stack must be empty! Or else generatorStackBase() won't work!
7209   assert(m_stack.top() == (TypedValue*)m_fp - m_fp->m_func->numSlotsInFrame());
7211   // Do linkage of the continuation's AR.
7212   assert(m_fp->hasThis());
7213   c_Continuation* cont = this_continuation(m_fp);
7214   ActRec* contAR = cont->actRec();
7215   arSetSfp(contAR, m_fp);
     // Return offset into the caller, relative to the caller func's base.
7217   contAR->m_soff = m_fp->m_func->unit()->offsetOf(pc)
7218     - (uintptr_t)m_fp->m_func->base();
7219   contAR->m_savedRip = (uintptr_t)tx64->getRetFromInterpretedGeneratorFrame();
7220   assert(isReturnHelper(contAR->m_savedRip));
7222   m_fp = contAR;
7223   pc = contAR->m_func->getEntry();
7224   SYNC();
7226   if (UNLIKELY(!EventHook::FunctionEnter(contAR, EventHook::NormalFunc))) {
7227     pc = m_pc;
// ContExit: leave the generator body, restoring the caller's frame and pc.
7231 void VMExecutionContext::iopContExit(PC& pc) {
7232   NEXT();
7234   EventHook::FunctionExit(m_fp);
7235   ActRec* prevFp = m_fp->arGetSfp();
7236   pc = prevFp->m_func->getEntry() + m_fp->m_soff;
7237   m_fp = prevFp;
// Re-links the continuation frame's VarEnv (if any) onto the global VarEnv
// chain when resuming the generator; PackCont undoes this.
7240 void VMExecutionContext::unpackContVarEnvLinkage(ActRec* fp) {
7241   // This is called from the TC, and is assumed not to reenter.
7242   if (fp->hasVarEnv()) {
7243     VarEnv*& topVE = g_vmContext->m_topVarEnv;
7244     fp->getVarEnv()->setPrevious(topVE);
7245     topVE = fp->getVarEnv();
// UnpackCont: on entering the generator body, relink its VarEnv and push
// (a) the value sent into the generator and (b) the resume label.
7249 inline void OPTBLD_INLINE VMExecutionContext::iopUnpackCont(PC& pc) {
7250   NEXT();
7251   c_Continuation* cont = frame_continuation(m_fp);
7253   unpackContVarEnvLinkage(m_fp);
7255   // Return the received value
7256   TypedValue* recv_to = m_stack.allocTV();
7257   TypedValue* recv_fr = cont->m_received.asTypedValue();
     // Teleport the received value (raw copy + null the source): ownership
     // moves to the stack without touching refcounts.
7258   memcpy(recv_to, recv_fr, sizeof(TypedValue));
7259   tvWriteNull(recv_fr);
7261   // Return the label in a stack cell
7262   TypedValue* label = m_stack.allocTV();
7263   label->m_type = KindOfInt64;
7264   label->m_data.num = cont->m_label;
// Unlinks the continuation frame's VarEnv from the global chain when the
// generator suspends (inverse of unpackContVarEnvLinkage).
7267 void VMExecutionContext::packContVarEnvLinkage(ActRec* fp) {
7268   if (fp->hasVarEnv()) {
7269     g_vmContext->m_topVarEnv = fp->getVarEnv()->previous();
// PackCont <label>: suspend the generator -- unlink its VarEnv, record the
// resume label, and store the yielded value (popped from the stack).
7273 inline void OPTBLD_INLINE VMExecutionContext::iopPackCont(PC& pc) {
7274   NEXT();
7275   DECODE_IVA(label);
7276   c_Continuation* cont = frame_continuation(m_fp);
7278   packContVarEnvLinkage(m_fp);
7279   cont->c_Continuation::t_update(label, tvAsCVarRef(m_stack.topTV()));
7280   m_stack.popTV();
// ContRetC: return from the generator body -- mark the continuation done,
// stash the return value, and pop back to the caller frame.
7283 inline void OPTBLD_INLINE VMExecutionContext::iopContRetC(PC& pc) {
7284   NEXT();
7285   c_Continuation* cont = frame_continuation(m_fp);
7286   cont->setDone(true);
7287   tvSetIgnoreRef(m_stack.topC(), cont->m_value.asTypedValue());
7288   m_stack.popC();
7290   EventHook::FunctionExit(m_fp);
7291   ActRec* prevFp = m_fp->arGetSfp();
7292   pc = prevFp->m_func->getEntry() + m_fp->m_soff;
7293   m_fp = prevFp;
// ContNext: prepare the continuation for a next() resume with no sent value.
7296 inline void OPTBLD_INLINE VMExecutionContext::iopContNext(PC& pc) {
7297   NEXT();
7298   c_Continuation* cont = this_continuation(m_fp);
7299   cont->preNext();
7300   cont->m_received.setNull();
// Shared body of ContSend/ContRaise: store the sent value (local 0 of the
// method frame) into m_received.  For raise, back the label up by one so the
// generator resumes at its exception-raising path.
7303 template<bool raise>
7304 inline void VMExecutionContext::contSendImpl() {
7305   c_Continuation* cont = this_continuation(m_fp);
7306   cont->startedCheck();
7307   cont->preNext();
7308   cont->m_received.assignVal(tvAsVariant(frame_local(m_fp, 0)));
7309   if (raise) {
7310     assert(cont->m_label);
7311     --cont->m_label;
// ContSend / ContRaise: thin wrappers selecting the contSendImpl variant.
7315 inline void OPTBLD_INLINE VMExecutionContext::iopContSend(PC& pc) {
7316   NEXT();
7317   contSendImpl<false>();
7320 inline void OPTBLD_INLINE VMExecutionContext::iopContRaise(PC& pc) {
7321   NEXT();
7322   contSendImpl<true>();
// ContValid: push whether the continuation has not finished.
7325 inline void OPTBLD_INLINE VMExecutionContext::iopContValid(PC& pc) {
7326   NEXT();
7327   TypedValue* tv = m_stack.allocTV();
7328   tvWriteUninit(tv);
7329   tvAsVariant(tv) = !this_continuation(m_fp)->done();
// ContCurrent: push the continuation's current yielded value (requires the
// generator to have started).
7332 inline void OPTBLD_INLINE VMExecutionContext::iopContCurrent(PC& pc) {
7333   NEXT();
7334   c_Continuation* cont = this_continuation(m_fp);
7335   cont->startedCheck();
7337   TypedValue* tv = m_stack.allocTV();
7338   tvWriteUninit(tv);
7339   tvAsVariant(tv) = cont->m_value;
// ContStopped: clear the continuation's running flag.
7342 inline void OPTBLD_INLINE VMExecutionContext::iopContStopped(PC& pc) {
7343   NEXT();
7344   this_continuation(m_fp)->setRunning(false);
// ContHandle: an exception escaped the generator body -- mark the
// continuation stopped/done, clear its value, and rethrow the exception
// object popped from the stack.
7347 inline void OPTBLD_INLINE VMExecutionContext::iopContHandle(PC& pc) {
7348   NEXT();
7349   c_Continuation* cont = this_continuation(m_fp);
7350   cont->setRunning(false);
7351   cont->setDone(true);
7352   cont->m_value.setNull();
7354   Variant exn = tvAsVariant(m_stack.topTV());
7355   m_stack.popC();
7356   assert(exn.asObjRef().instanceof(SystemLib::s_ExceptionClass));
7357   throw exn.asObjRef();
// Strlen: fast path replaces a string on the stack with its length in
// place; any other type falls back to the builtin f_strlen semantics.
7360 inline void OPTBLD_INLINE VMExecutionContext::iopStrlen(PC& pc) {
7361   NEXT();
7362   TypedValue* subj = m_stack.topTV();
7363   if (LIKELY(IS_STRING_TYPE(subj->m_type))) {
7364     int64_t ans = subj->m_data.pstr->size();
7365     tvRefcountedDecRef(subj);
7366     subj->m_type = KindOfInt64;
7367     subj->m_data.num = ans;
7368   } else {
7369     Variant ans = f_strlen(tvAsVariant(subj));
7370     tvAsVariant(subj) = ans;
// IncStat <counter> <value>: bump an internal stats counter.
7374 inline void OPTBLD_INLINE VMExecutionContext::iopIncStat(PC& pc) {
7375   NEXT();
7376   DECODE_IVA(counter);
7377   DECODE_IVA(value);
7378   Stats::inc(Stats::StatCounter(counter), value);
// Shared implementation of class_exists/interface_exists/trait_exists:
// pops the autoload flag, then replaces the name on the stack with the
// boolean result of the lookup filtered by the given attribute.
7381 void VMExecutionContext::classExistsImpl(PC& pc, Attr typeAttr) {
7382   NEXT();
7383   TypedValue* aloadTV = m_stack.topTV();
7384   tvCastToBooleanInPlace(aloadTV);
7385   assert(aloadTV->m_type == KindOfBoolean);
7386   bool autoload = aloadTV->m_data.num;
7387   m_stack.popX();
7389   TypedValue* name = m_stack.topTV();
7390   tvCastToStringInPlace(name);
7391   assert(IS_STRING_TYPE(name->m_type));
7393   tvAsVariant(name) = Unit::classExists(name->m_data.pstr, autoload, typeAttr);
// Wrappers selecting the attribute filter for each *_exists opcode.
7396 inline void OPTBLD_INLINE VMExecutionContext::iopClassExists(PC& pc) {
7397   classExistsImpl(pc, AttrNone);
7400 inline void OPTBLD_INLINE VMExecutionContext::iopInterfaceExists(PC& pc) {
7401   classExistsImpl(pc, AttrInterface);
7404 inline void OPTBLD_INLINE VMExecutionContext::iopTraitExists(PC& pc) {
7405   classExistsImpl(pc, AttrTrait);
// Renders the VM stack as a multi-line string for tracing/debugging,
// framed by prefix markers; returns "__Halted" when there is no frame.
7408 string
7409 VMExecutionContext::prettyStack(const string& prefix) const {
7410   if (!getFP()) {
7411     string s("__Halted");
7412     return s;
7414   int offset = (m_fp->m_func->unit() != nullptr)
7415                ? pcOff()
7416                : 0;
7417   string begPrefix = prefix + "__";
7418   string midPrefix = prefix + "|| ";
7419   string endPrefix = prefix + "\\/";
7420   string stack = m_stack.toString(m_fp, offset, midPrefix);
7421   return begPrefix + "\n" + stack + endPrefix;
// Debug check that the VM register state has been synced (clean).
7424 void VMExecutionContext::checkRegStateWork() const {
7425   assert(tl_regState == REGSTATE_CLEAN);
// Debugger convenience: print the current VM stack to stderr.
7428 void VMExecutionContext::DumpStack() {
7429   string s = g_vmContext->prettyStack("");
7430   fprintf(stderr, "%s\n", s.c_str());
// Debugger convenience: print the bytecode of the unit `skip` frames up
// from the current frame, with the current offset.
7433 void VMExecutionContext::DumpCurUnit(int skip) {
7434   ActRec* fp = g_vmContext->getFP();
7435   Offset pc = fp->m_func->unit() ? g_vmContext->pcOff() : 0;
7436   while (skip--) {
7437     fp = g_vmContext->getPrevVMState(fp, &pc);
7439   if (fp == nullptr) {
7440     std::cout << "Don't have a valid fp\n";
7441     return;
7444   printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
7445   Unit* u = fp->m_func->unit();
7446   if (u == nullptr) {
7447     std::cout << "Current unit is NULL\n";
7448     return;
7450   printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
7451   std::cout << u->toString();
// Debugger convenience: print the translated-code address and source
// location that called into the current VM frame.
7454 void VMExecutionContext::PrintTCCallerInfo() {
7455   VMRegAnchor _;
7456   ActRec* fp = g_vmContext->getFP();
7457   Unit* u = fp->m_func->unit();
7458   fprintf(stderr, "Called from TC address %p\n",
7459           TranslatorX64::Get()->getTranslatedCaller());
7460   std::cerr << u->filepath()->data() << ':'
7461             << u->getLineNumber(u->offsetOf(g_vmContext->getPC())) << std::endl;
// Emits a separator line in the trace log (trace level 3).
7464 static inline void
7465 condStackTraceSep(const char* pfx) {
7466   TRACE(3, "%s"
7467         "========================================"
7468         "========================================\n",
7469         pfx);
// COND_STACKTRACE: trace the pretty-printed stack at trace level 3.
7472 #define COND_STACKTRACE(pfx) \
7473   ONTRACE(3, \
7474           string stack = prettyStack(pfx); \
7475           Trace::trace("%s\n", stack.c_str());)
// Generates one public op<Name> entry point per opcode: traces state before
// and after, calls the iop<Name> handler with the current pc, and syncs VM
// registers.  (Note the "pusph" typo in the parameter list is harmless --
// the parameter is unused.)
7477 #define O(name, imm, pusph, pop, flags) \
7478 void VMExecutionContext::op##name() { \
7479   condStackTraceSep("op"#name" "); \
7480   COND_STACKTRACE("op"#name" pre: "); \
7481   PC pc = m_pc; \
7482   assert(*pc == Op##name); \
7483   ONTRACE(1, \
7484           int offset = m_fp->m_func->unit()->offsetOf(pc); \
7485           Trace::trace("op"#name" offset: %d\n", offset)); \
7486   iop##name(pc); \
7487   SYNC(); \
7488   COND_STACKTRACE("op"#name" post: "); \
7489   condStackTraceSep("op"#name" "); \
7491 OPCODES
7492 #undef O
// The DECODE helpers are no longer needed past this point.
7493 #undef NEXT
7494 #undef DECODE_JMP
7495 #undef DECODE
// Records the return-value type of the current function for type profiling,
// skipping pseudomains, closures, magic and special functions.
7497 static inline void
7498 profileReturnValue(const DataType dt) {
7499   const Func* f = curFunc();
7500   if (f->isPseudoMain() || f->isClosureBody() || f->isMagic() ||
7501       Func::isSpecial(f->name()))
7502     return;
7503   recordType(TypeProfileKey(TypeProfileKey::MethodName, f->name()), dt);
// Main interpreter loop, specialized at compile time by dispatchFlags:
//   LimitInstrs    - stop after numInstrs instructions,
//   BreakOnCtlFlow - stop at the first control-flow instruction,
//   Profile        - record return-value types at RetC/RetV.
// Uses GCC computed gotos: three parallel label tables select per-opcode
// entry points for direct, debugger-hooked, and code-coverage dispatch.
7506 template <int dispatchFlags>
7507 inline void VMExecutionContext::dispatchImpl(int numInstrs) {
7508   static const bool limInstrs = dispatchFlags & LimitInstrs;
7509   static const bool breakOnCtlFlow = dispatchFlags & BreakOnCtlFlow;
7510   static const bool profile = dispatchFlags & Profile;
7511   static const void *optabDirect[] = {
7512 #define O(name, imm, push, pop, flags) \
7513     &&Label##name,
7514     OPCODES
7515 #undef O
7517   static const void *optabDbg[] = {
7518 #define O(name, imm, push, pop, flags) \
7519     &&LabelDbg##name,
7520     OPCODES
7521 #undef O
7523   static const void *optabCover[] = {
7524 #define O(name, imm, push, pop, flags) \
7525     &&LabelCover##name,
7526     OPCODES
7527 #undef O
7529   assert(sizeof(optabDirect) / sizeof(const void *) == Op_count);
7530   assert(sizeof(optabDbg) / sizeof(const void *) == Op_count);
  // Pick the dispatch table: coverage table if this request collects code
  // coverage, debugger table if a debugger is attached, else direct.
7531   const void **optab = optabDirect;
7532   bool collectCoverage = ThreadInfo::s_threadInfo->
7533     m_reqInjectionData.getCoverage();
7534   if (collectCoverage) {
7535     optab = optabCover;
7537   DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
7539    * Trace-only mapping of opcodes to names.
7541 #ifdef HPHP_TRACE
7542   static const char *nametab[] = {
7543 #define O(name, imm, push, pop, flags) \
7544     #name,
7545     OPCODES
7546 #undef O
7548 #endif /* HPHP_TRACE */
7549   bool isCtlFlow = false;
  // DISPATCH: check the stop conditions, then jump to the handler label for
  // the opcode at pc.  Returning from here is the loop's only normal exit.
7551 #define DISPATCH() do {                                                 \
7552     if ((breakOnCtlFlow && isCtlFlow) ||                                \
7553         (limInstrs && UNLIKELY(numInstrs-- == 0))) {                    \
7554       ONTRACE(1,                                                       \
7555               Trace::trace("dispatch: Halt ExecutionContext::dispatch(%p)\n", \
7556                            m_fp));                                      \
7557       delete g_vmContext->m_lastLocFilter;                              \
7558       g_vmContext->m_lastLocFilter = nullptr;                           \
7559       return;                                                          \
7561     Op op = (Op)*pc;                                                   \
7562     COND_STACKTRACE("dispatch:                    ");                  \
7563     ONTRACE(1,                                                         \
7564             Trace::trace("dispatch: %d: %s\n", pcOff(), nametab[op])); \
7565     assert(op < Op_count);                                             \
7566     if (profile && (op == OpRetC || op == OpRetV)) {                   \
7567       profileReturnValue(m_stack.top()->m_type);                       \
7569     goto *optab[op];                                                   \
7570 } while (0)
7572   ONTRACE(1, Trace::trace("dispatch: Enter ExecutionContext::dispatch(%p)\n",
7573           m_fp));
7574   PC pc = m_pc;
7575   DISPATCH();
  // Per-opcode handler bodies.  The Dbg label falls through to Cover, which
  // falls through to the direct label; a return opcode with pc == 0 (set by
  // the handler) means the outermost frame returned, so stop.
7577 #define O(name, imm, pusph, pop, flags)                       \
7578   LabelDbg##name:                                             \
7579     phpDebuggerOpcodeHook(pc);                                \
7580   LabelCover##name:                                           \
7581     if (collectCoverage) {                                    \
7582       recordCodeCoverage(pc);                                 \
7584   Label##name: {                                              \
7585     iop##name(pc);                                            \
7586     SYNC();                                                   \
7587     if (breakOnCtlFlow) {                                     \
7588       isCtlFlow = instrIsControlFlow(Op##name);               \
7589       Stats::incOp(Op##name);                                 \
7591     const Op op = Op##name;                                   \
7592     if (op == OpRetC || op == OpRetV || op == OpNativeImpl) { \
7593       if (UNLIKELY(!pc)) { m_fp = 0; return; }                \
7595     DISPATCH();                                               \
7597   OPCODES
7598 #undef O
7599 #undef DISPATCH
7602 class InterpretingFlagGuard {
7603 private:
7604 bool m_oldFlag;
7605 public:
7606 InterpretingFlagGuard() {
7607 m_oldFlag = g_vmContext->m_interpreting;
7608 g_vmContext->m_interpreting = true;
7610 ~InterpretingFlagGuard() {
7611 g_vmContext->m_interpreting = m_oldFlag;
7615 void VMExecutionContext::dispatch() {
7616 InterpretingFlagGuard ifg;
7617 if (shouldProfile()) {
7618 dispatchImpl<Profile>(0);
7619 } else {
7620 dispatchImpl<0>(0);
7624 void VMExecutionContext::dispatchN(int numInstrs) {
7625 InterpretingFlagGuard ifg;
7626 dispatchImpl<LimitInstrs | BreakOnCtlFlow>(numInstrs);
7627 // We are about to go back to Jit, check whether we should
7628 // stick with interpreter
7629 if (DEBUGGER_FORCE_INTR) {
7630 throw VMSwitchModeException(false);
7634 void VMExecutionContext::dispatchBB() {
7635 InterpretingFlagGuard ifg;
7636 dispatchImpl<BreakOnCtlFlow>(0);
7637 // We are about to go back to Jit, check whether we should
7638 // stick with interpreter
7639 if (DEBUGGER_FORCE_INTR) {
7640 throw VMSwitchModeException(false);
7644 void VMExecutionContext::recordCodeCoverage(PC pc) {
7645 Unit* unit = getFP()->m_func->unit();
7646 assert(unit != nullptr);
7647 if (unit == SystemLib::s_nativeFuncUnit ||
7648 unit == SystemLib::s_nativeClassUnit) {
7649 return;
7651 int line = unit->getLineNumber(pcOff());
7652 assert(line != -1);
7654 if (unit != m_coverPrevUnit || line != m_coverPrevLine) {
7655 ThreadInfo* info = ThreadInfo::s_threadInfo.getNoCheck();
7656 m_coverPrevUnit = unit;
7657 m_coverPrevLine = line;
7658 const StringData* filepath = unit->filepath();
7659 assert(filepath->isStatic());
7660 info->m_coverage->Record(filepath->data(), line, line);
7664 void VMExecutionContext::resetCoverageCounters() {
7665 m_coverPrevLine = -1;
7666 m_coverPrevUnit = nullptr;
7669 void VMExecutionContext::pushVMState(VMState &savedVM,
7670 const ActRec* reentryAR) {
7671 if (debug && savedVM.fp &&
7672 savedVM.fp->m_func &&
7673 savedVM.fp->m_func->unit()) {
7674 // Some asserts and tracing.
7675 const Func* func = savedVM.fp->m_func;
7676 (void) /* bound-check asserts in offsetOf */
7677 func->unit()->offsetOf(savedVM.pc);
7678 TRACE(3, "pushVMState: saving frame %s pc %p off %d fp %p\n",
7679 func->name()->data(),
7680 savedVM.pc,
7681 func->unit()->offsetOf(savedVM.pc),
7682 savedVM.fp);
7684 m_nestedVMs.push_back(ReentryRecord(savedVM, reentryAR));
7685 m_nesting++;
7688 void VMExecutionContext::popVMState() {
7689 assert(m_nestedVMs.size() >= 1);
7691 VMState &savedVM = m_nestedVMs.back().m_savedState;
7692 m_pc = savedVM.pc;
7693 m_fp = savedVM.fp;
7694 m_firstAR = savedVM.firstAR;
7695 assert(m_stack.top() == savedVM.sp);
7697 if (debug) {
7698 if (savedVM.fp &&
7699 savedVM.fp->m_func &&
7700 savedVM.fp->m_func->unit()) {
7701 const Func* func = savedVM.fp->m_func;
7702 (void) /* bound-check asserts in offsetOf */
7703 func->unit()->offsetOf(savedVM.pc);
7704 TRACE(3, "popVMState: restoring frame %s pc %p off %d fp %p\n",
7705 func->name()->data(),
7706 savedVM.pc,
7707 func->unit()->offsetOf(savedVM.pc),
7708 savedVM.fp);
7712 m_nestedVMs.pop_back();
7713 m_nesting--;
7716 void VMExecutionContext::requestInit() {
7717 assert(SystemLib::s_unit);
7718 assert(SystemLib::s_nativeFuncUnit);
7719 assert(SystemLib::s_nativeClassUnit);
7721 new (&s_requestArenaStorage) RequestArena();
7722 new (&s_varEnvArenaStorage) VarEnvArena();
7724 VarEnv::createGlobal();
7725 m_stack.requestInit();
7726 tx64 = nextTx64;
7727 tx64->requestInit();
7729 if (UNLIKELY(RuntimeOption::EvalJitEnableRenameFunction)) {
7730 SystemLib::s_unit->merge();
7731 if (SystemLib::s_hhas_unit) SystemLib::s_hhas_unit->merge();
7732 SystemLib::s_nativeFuncUnit->merge();
7733 SystemLib::s_nativeClassUnit->merge();
7734 } else {
7735 // System units are always merge only, and
7736 // everything is persistent.
7737 assert(SystemLib::s_unit->isEmpty());
7738 assert(!SystemLib::s_hhas_unit || SystemLib::s_hhas_unit->isEmpty());
7739 assert(SystemLib::s_nativeFuncUnit->isEmpty());
7740 assert(SystemLib::s_nativeClassUnit->isEmpty());
7743 profileRequestStart();
7745 #ifdef DEBUG
7746 Class* cls = Unit::GetNamedEntity(s_stdclass.get())->clsList();
7747 assert(cls);
7748 assert(cls == SystemLib::s_stdclassClass);
7749 #endif
7752 void VMExecutionContext::requestExit() {
7753 treadmillSharedVars();
7754 destructObjects();
7755 syncGdbState();
7756 tx64->requestExit();
7757 tx64 = nullptr;
7758 m_stack.requestExit();
7759 profileRequestEnd();
7760 EventHook::Disable();
7762 if (m_globalVarEnv) {
7763 assert(m_topVarEnv = m_globalVarEnv);
7764 VarEnv::destroy(m_globalVarEnv);
7765 m_globalVarEnv = m_topVarEnv = 0;
7768 varenv_arena().~VarEnvArena();
7769 request_arena().~RequestArena();
7772 ///////////////////////////////////////////////////////////////////////////////