Add an AliasCls bytecode
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blob61aaf0e2f6fd2a31e9d6e848dd845a770a530a3e
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/bytecode.h"
19 #include <algorithm>
20 #include <string>
21 #include <vector>
22 #include <sstream>
23 #include <iostream>
24 #include <iomanip>
25 #include <cinttypes>
27 #include <boost/filesystem.hpp>
29 #include <folly/String.h>
30 #include <folly/portability/SysMman.h>
32 #include "hphp/util/debug.h"
33 #include "hphp/util/numa.h"
34 #include "hphp/util/portability.h"
35 #include "hphp/util/ringbuffer.h"
36 #include "hphp/util/text-util.h"
37 #include "hphp/util/trace.h"
39 #include "hphp/compiler/builtin_symbols.h"
41 #include "hphp/system/systemlib.h"
43 #include "hphp/runtime/base/apc-stats.h"
44 #include "hphp/runtime/base/apc-typed-value.h"
45 #include "hphp/runtime/base/array-init.h"
46 #include "hphp/runtime/base/code-coverage.h"
47 #include "hphp/runtime/base/collections.h"
48 #include "hphp/runtime/base/container-functions.h"
49 #include "hphp/runtime/base/execution-context.h"
50 #include "hphp/runtime/base/externals.h"
51 #include "hphp/runtime/base/hhprof.h"
52 #include "hphp/runtime/base/memory-manager.h"
53 #include "hphp/runtime/base/mixed-array.h"
54 #include "hphp/runtime/base/set-array.h"
55 #include "hphp/runtime/base/program-functions.h"
56 #include "hphp/runtime/base/rds.h"
57 #include "hphp/runtime/base/repo-auth-type-codec.h"
58 #include "hphp/runtime/base/runtime-error.h"
59 #include "hphp/runtime/base/runtime-option.h"
60 #include "hphp/runtime/base/stat-cache.h"
61 #include "hphp/runtime/base/stats.h"
62 #include "hphp/runtime/base/strings.h"
63 #include "hphp/runtime/base/tv-arith.h"
64 #include "hphp/runtime/base/tv-comparisons.h"
65 #include "hphp/runtime/base/tv-conversions.h"
66 #include "hphp/runtime/base/unit-cache.h"
68 #include "hphp/runtime/ext/array/ext_array.h"
69 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
70 #include "hphp/runtime/ext/asio/ext_async-generator-wait-handle.h"
71 #include "hphp/runtime/ext/asio/ext_async-generator.h"
72 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
73 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
74 #include "hphp/runtime/ext/asio/ext_waitable-wait-handle.h"
75 #include "hphp/runtime/ext/std/ext_std_closure.h"
76 #include "hphp/runtime/ext/extension.h"
77 #include "hphp/runtime/ext/generator/ext_generator.h"
78 #include "hphp/runtime/ext/hh/ext_hh.h"
79 #include "hphp/runtime/ext/reflection/ext_reflection.h"
80 #include "hphp/runtime/ext/std/ext_std_variable.h"
81 #include "hphp/runtime/ext/string/ext_string.h"
82 #include "hphp/runtime/ext/hash/hash_murmur.h"
83 #include "hphp/runtime/ext/json/JSON_parser.h"
85 #include "hphp/runtime/server/rpc-request-handler.h"
86 #include "hphp/runtime/server/source-root-info.h"
88 #include "hphp/runtime/vm/act-rec-defs.h"
89 #include "hphp/runtime/vm/act-rec.h"
90 #include "hphp/runtime/vm/debug/debug.h"
91 #include "hphp/runtime/vm/debugger-hook.h"
92 #include "hphp/runtime/vm/event-hook.h"
93 #include "hphp/runtime/vm/func-inline.h"
94 #include "hphp/runtime/vm/globals-array.h"
95 #include "hphp/runtime/vm/hh-utils.h"
96 #include "hphp/runtime/vm/hhbc-codec.h"
97 #include "hphp/runtime/vm/hhbc.h"
98 #include "hphp/runtime/vm/interp-helpers.h"
99 #include "hphp/runtime/vm/member-operations.h"
100 #include "hphp/runtime/vm/method-lookup.h"
101 #include "hphp/runtime/vm/native.h"
102 #include "hphp/runtime/vm/php-debug.h"
103 #include "hphp/runtime/vm/repo-global-data.h"
104 #include "hphp/runtime/vm/repo.h"
105 #include "hphp/runtime/vm/resumable.h"
106 #include "hphp/runtime/vm/runtime.h"
107 #include "hphp/runtime/vm/srckey.h"
108 #include "hphp/runtime/vm/type-constraint.h"
109 #include "hphp/runtime/vm/type-profile.h"
110 #include "hphp/runtime/vm/unwind.h"
112 #include "hphp/runtime/vm/jit/code-cache.h"
113 #include "hphp/runtime/vm/jit/debugger.h"
114 #include "hphp/runtime/vm/jit/enter-tc.h"
115 #include "hphp/runtime/vm/jit/perf-counters.h"
116 #include "hphp/runtime/vm/jit/tc.h"
117 #include "hphp/runtime/vm/jit/translator-inline.h"
118 #include "hphp/runtime/vm/jit/translator-runtime.h"
119 #include "hphp/runtime/vm/jit/translator.h"
120 #include "hphp/runtime/vm/jit/unwind-itanium.h"
123 namespace HPHP {
125 TRACE_SET_MOD(bcinterp);
127 // TODO: #1746957, #1756122
128 // we should skip the call in call_user_func_array, if
129 // by reference params are passed by value, or if its
130 // argument is not an array, but currently lots of tests
131 // depend on actually making the call.
132 const bool skipCufOnInvalidParams = false;
134 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
135 // to be closer to other bytecode.cpp data.
136 bool RuntimeOption::RepoAuthoritative = false;
138 using jit::TCA;
140 // GCC 4.8 has some real problems with all the inlining in this file, so don't
141 // go overboard with that version.
142 #if DEBUG || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 8))
143 #define OPTBLD_INLINE
144 #define OPTBLD_FLT_INLINE
145 #else
146 #define OPTBLD_INLINE ALWAYS_INLINE
147 #define OPTBLD_FLT_INLINE INLINE_FLATTEN
148 #endif
// Returns the class context of the given activation record, or nullptr when
// there is no frame. This <false> specialization reports the frame's own
// class directly; contrast with the <true> specialization below, which
// skips pseudo-main/builtin frames.
150 template <>
151 Class* arGetContextClassImpl<false>(const ActRec* ar) {
152 if (ar == nullptr) {
153 return nullptr;
155 return ar->m_func->cls();
// Returns the class context of the given activation record, treating
// pseudo-mains and builtins as transparent: such frames inherit the context
// of their caller, so we walk back through previous VM states until we find
// a frame that is neither, or run out of frames (returning nullptr).
158 template <>
159 Class* arGetContextClassImpl<true>(const ActRec* ar) {
160 if (ar == nullptr) {
161 return nullptr;
163 if (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin()) {
164 // Pseudomains inherit the context of their caller
165 auto const context = g_context.getNoCheck();
166 ar = context->getPrevVMState(ar);
// Keep unwinding while the caller is itself a pseudo-main or builtin.
167 while (ar != nullptr &&
168 (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin())) {
169 ar = context->getPrevVMState(ar);
171 if (ar == nullptr) {
172 return nullptr;
175 return ar->m_func->cls();
178 void frame_free_locals_no_hook(ActRec* fp) {
179 frame_free_locals_inl_no_hook(fp, fp->func()->numLocals());
182 const StaticString s_call_user_func("call_user_func");
183 const StaticString s_call_user_func_array("call_user_func_array");
184 const StaticString s___call("__call");
185 const StaticString s___callStatic("__callStatic");
186 const StaticString s_file("file");
187 const StaticString s_line("line");
188 const StaticString s_getWaitHandle("getWaitHandle");
190 ///////////////////////////////////////////////////////////////////////////////
192 //=============================================================================
193 // Miscellaneous decoders.
195 inline const char* prettytype(int) { return "int"; }
196 inline const char* prettytype(long) { return "long"; }
197 inline const char* prettytype(long long) { return "long long"; }
198 inline const char* prettytype(double) { return "double"; }
199 inline const char* prettytype(unsigned) { return "unsigned"; }
200 inline const char* prettytype(OODeclExistsOp) { return "OpDeclExistsOp"; }
201 inline const char* prettytype(FatalOp) { return "FatalOp"; }
202 inline const char* prettytype(IsTypeOp) { return "IsTypeOp"; }
203 inline const char* prettytype(SetOpOp) { return "SetOpOp"; }
204 inline const char* prettytype(IncDecOp) { return "IncDecOp"; }
205 inline const char* prettytype(ObjMethodOp) { return "ObjMethodOp"; }
206 inline const char* prettytype(BareThisOp) { return "BareThisOp"; }
207 inline const char* prettytype(InitPropOp) { return "InitPropOp"; }
208 inline const char* prettytype(SilenceOp) { return "SilenceOp"; }
209 inline const char* prettytype(SwitchKind) { return "SwitchKind"; }
210 inline const char* prettytype(MOpMode) { return "MOpMode"; }
211 inline const char* prettytype(QueryMOp) { return "QueryMOp"; }
213 // load a T value from *pc without incrementing
// Uses memcpy rather than dereferencing a cast pointer so that unaligned
// immediates in the bytecode stream are read safely; the compiler lowers
// this to a plain load where alignment permits.
214 template<class T> T peek(PC pc) {
215 T v;
216 std::memcpy(&v, pc, sizeof v); // should compile to a load
217 ONTRACE(2, Trace::trace("decode: Immediate %s %" PRIi64"\n",
218 prettytype(v), int64_t(v)));
219 return v;
// Reads a T immediate from the bytecode stream and advances pc past it.
222 template<class T> T decode(PC& pc) {
223 auto v = peek<T>(pc);
224 pc += sizeof(T);
225 return v;
228 inline const StringData* decode_litstr(PC& pc) {
229 auto id = decode<Id>(pc);
230 return vmfp()->m_func->unit()->lookupLitstrId(id);
233 ALWAYS_INLINE Offset decode_ba(PC& pc) {
234 return decode<Offset>(pc);
237 // find the AR for the current FPI region using func metadata
238 static inline ActRec* arFromInstr(PC pc) {
239 const ActRec* fp = vmfp();
240 auto const func = fp->m_func;
// Resumed frames don't live on the VM stack, so rebase fp to the
// resumable's stack base plus the frame's slot count before applying the
// FPI delta recorded for this instruction.
241 if (fp->resumed()) {
242 fp = reinterpret_cast<const ActRec*>(Stack::resumableStackBase(fp) +
243 func->numSlotsInFrame());
246 return arAtOffset(fp, -instrFpToArDelta(func, pc));
249 // Find the AR for the current FPI region by indexing from sp
250 static inline ActRec* arFromSp(int32_t n) {
251 auto ar = reinterpret_cast<ActRec*>(vmStack().top() + n);
252 assert(ar == arFromInstr(vmpc()));
253 return ar;
// Chooses the member-op mode for parameter paramId of the callee in ar:
// by-reference parameters get MOpMode::Define, by-value get MOpMode::Warn.
256 ALWAYS_INLINE MOpMode fpass_mode(ActRec* ar, int paramId) {
257 assert(paramId < ar->numArgs());
258 return ar->m_func->byRef(paramId) ? MOpMode::Define : MOpMode::Warn;
261 // wrapper for local variable LA operand
262 struct local_var {
263 TypedValue* ptr;
264 int32_t index;
265 TypedValue* operator->() const { return ptr; }
266 TypedValue& operator*() const { return *ptr; }
269 // wrapper for variable-size IVA operand
270 struct intva_t {
271 int32_t n;
272 /* implicit */ operator int32_t() const { return n; }
273 intva_t& operator=(int32_t v) { n = v; return *this; }
276 // wrapper for class-ref slot CA(R|W) operand
277 struct clsref_slot {
278 LowPtr<Class>* ptr;
279 uint32_t index;
// Reads the class out of the slot. In debug builds the class is validated
// and the slot is poisoned with kTrashClsRef so stale reads are caught.
281 Class* take() const {
282 auto ret = *ptr;
283 if (debug) {
284 ret->validate();
285 memset(ptr, kTrashClsRef, sizeof(*ptr));
287 return ret.get();
// Stores cls into the slot.
290 void put(Class* cls) { *ptr = cls; }
293 // wrapper to handle unaligned access to variadic immediates
294 template<class T> struct imm_array {
295 explicit imm_array(PC pc) : ptr(pc) {}
296 PC const ptr;
297 T operator[](int32_t i) const {
298 T e;
299 memcpy(&e, ptr + i * sizeof(T), sizeof(T));
300 return e;
304 ALWAYS_INLINE local_var decode_local(PC& pc) {
305 auto la = decode_iva(pc);
306 assert(la < vmfp()->m_func->numLocals());
307 return local_var{frame_local(vmfp(), la), la};
310 ALWAYS_INLINE Iter* decode_iter(PC& pc) {
311 auto ia = decode_iva(pc);
312 return frame_iter(vmfp(), ia);
315 ALWAYS_INLINE intva_t decode_intva(PC& pc) {
316 return intva_t{decode_iva(pc)};
319 ALWAYS_INLINE clsref_slot decode_clsref_slot(PC& pc) {
320 uint32_t ca = decode_iva(pc);
321 assertx(ca < vmfp()->m_func->numClsRefSlots());
322 return clsref_slot{frame_clsref_slot(vmfp(), ca), ca};
325 //=============================================================================
326 // Miscellaneous helpers.
// Returns the "static" class for frame fp: nullptr for frames whose func
// has no class, the dynamic class of $this when one is present, otherwise
// the class stored on the frame.
328 static inline Class* frameStaticClass(ActRec* fp) {
329 if (!fp->func()->cls()) return nullptr;
330 if (fp->hasThis()) {
331 return fp->getThis()->getVMClass();
333 return fp->getClass();
336 //=============================================================================
337 // VarEnv.
339 const StaticString s_GLOBALS("GLOBALS");
341 void VarEnv::createGlobal() {
342 assert(!g_context->m_globalVarEnv);
343 g_context->m_globalVarEnv = req::make_raw<VarEnv>();
346 VarEnv::VarEnv()
347 : m_nvTable()
348 , m_extraArgs(nullptr)
349 , m_depth(0)
350 , m_global(true)
352 TRACE(3, "Creating VarEnv %p [global scope]\n", this);
353 auto globals_var = Variant::attach(
354 new (MM().objMalloc(sizeof(GlobalsArray))) GlobalsArray(&m_nvTable)
356 m_nvTable.set(s_GLOBALS.get(), globals_var.asTypedValue());
359 VarEnv::VarEnv(ActRec* fp, ExtraArgs* eArgs)
360 : m_nvTable(fp)
361 , m_extraArgs(eArgs)
362 , m_depth(1)
363 , m_global(false)
365 assert(fp->func()->attrs() & AttrMayUseVV);
366 TRACE(3, "Creating lazily attached VarEnv %p on stack\n", this);
369 VarEnv::VarEnv(const VarEnv* varEnv, ActRec* fp)
370 : m_nvTable(varEnv->m_nvTable, fp)
371 , m_extraArgs(varEnv->m_extraArgs ? varEnv->m_extraArgs->clone(fp) : nullptr)
372 , m_depth(1)
373 , m_global(false)
375 assert(varEnv->m_depth == 1);
376 assert(!varEnv->m_global);
377 assert(fp->func()->attrs() & AttrMayUseVV);
379 TRACE(3, "Cloning VarEnv %p to %p\n", varEnv, this);
// Destroys the VarEnv. Only the global VarEnv does real cleanup here; for it
// we unset $GLOBALS and deliberately leak the table contents (see comment
// below). Local-scope teardown happens via exitFP()/deallocate().
382 VarEnv::~VarEnv() {
383 TRACE(3, "Destroying VarEnv %p [%s]\n",
384 this,
385 isGlobalScope() ? "global scope" : "local scope");
386 assert(isGlobalScope() == (g_context->m_globalVarEnv == this));
388 if (isGlobalScope()) {
390 * When detaching the global scope, we leak any live objects (and
391 * let MemoryManager clean them up). This is because we're
392 * not supposed to run destructors for objects that are live at
393 * the end of a request.
395 m_nvTable.unset(s_GLOBALS.get());
396 m_nvTable.leak();
398 // at this point, m_nvTable is destructed, and GlobalsArray
399 // has a dangling pointer to it.
402 void VarEnv::deallocate(ActRec* fp) {
403 fp->m_varEnv->exitFP(fp);
406 VarEnv* VarEnv::createLocal(ActRec* fp) {
407 return req::make_raw<VarEnv>(fp, fp->getExtraArgs());
410 VarEnv* VarEnv::clone(ActRec* fp) const {
411 return req::make_raw<VarEnv>(this, fp);
414 void VarEnv::suspend(const ActRec* oldFP, ActRec* newFP) {
415 m_nvTable.suspend(oldFP, newFP);
// Attaches this VarEnv to a newly entered frame newFP, detaching it from
// oldFP if there is one. oldFP == nullptr is the first attach of the global
// VarEnv (depth must be 0); otherwise oldFP must be the previous VM state of
// newFP with no intervening VarEnv-capable frames.
418 void VarEnv::enterFP(ActRec* oldFP, ActRec* newFP) {
419 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
420 this,
421 isGlobalScope() ? "global scope" : "local scope",
422 int(newFP->m_func->numNamedLocals()), newFP);
423 assert(newFP);
424 if (oldFP == nullptr) {
425 assert(isGlobalScope() && m_depth == 0);
426 } else {
427 assertx(m_depth >= 1);
428 assertx(g_context->getPrevVMStateSkipFrame(newFP) == oldFP);
// Debug-only: verify no frame between newFP and oldFP could have had a
// VarEnv of its own (AttrMayUseVV with a materialized VarEnv).
429 if (debug) {
430 auto prev = newFP;
431 while (true) {
432 prev = g_context->getPrevVMState(prev);
433 if (prev == oldFP) break;
434 assertx(!(prev->m_func->attrs() & AttrMayUseVV) || !prev->hasVarEnv());
437 m_nvTable.detach(oldFP);
440 assert(newFP->func()->attrs() & AttrMayUseVV);
441 m_nvTable.attach(newFP);
442 m_depth++;
// Detaches this VarEnv from an exiting frame fp. When the last attached
// frame exits (depth reaches 0), any ExtraArgs are deallocated and a
// local-scope VarEnv destroys itself; the global VarEnv persists. When
// depth is still positive, the VarEnv is re-attached to the nearest caller
// frame that shares it.
445 void VarEnv::exitFP(ActRec* fp) {
446 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
447 this,
448 isGlobalScope() ? "global scope" : "local scope",
449 fp);
450 assert(fp);
451 assert(m_depth > 0);
453 m_depth--;
454 m_nvTable.detach(fp);
456 if (m_depth == 0) {
457 if (m_extraArgs) {
458 assert(!isGlobalScope());
459 const auto numExtra = fp->numArgs() - fp->m_func->numNonVariadicParams();
460 ExtraArgs::deallocate(m_extraArgs, numExtra);
463 // don't free global VarEnv
464 if (!isGlobalScope()) {
465 req::destroy_raw(this);
467 } else {
// Walk back through previous VM states to find the caller frame that
// still references this VarEnv, and re-attach the table to it.
468 while (true) {
469 auto const prevFP = g_context->getPrevVMState(fp);
470 if (prevFP->func()->attrs() & AttrMayUseVV &&
471 prevFP->m_varEnv == this) {
472 m_nvTable.attach(prevFP);
473 break;
475 fp = prevFP;
480 void VarEnv::set(const StringData* name, const TypedValue* tv) {
481 m_nvTable.set(name, tv);
484 void VarEnv::bind(const StringData* name, TypedValue* tv) {
485 m_nvTable.bind(name, tv);
488 void VarEnv::setWithRef(const StringData* name, TypedValue* tv) {
489 if (tv->m_type == KindOfRef) {
490 bind(name, tv);
491 } else {
492 set(name, tv);
496 TypedValue* VarEnv::lookup(const StringData* name) {
497 return m_nvTable.lookup(name);
500 TypedValue* VarEnv::lookupAdd(const StringData* name) {
501 return m_nvTable.lookupAdd(name);
504 bool VarEnv::unset(const StringData* name) {
505 m_nvTable.unset(name);
506 return true;
509 const StaticString s_closure_var("0Closure");
// Builds a PHP array of all variables defined in this VarEnv, keyed by
// variable name, with referenced values preserved as references. The result
// is ksorted so it does not depend on hashtable iteration order.
511 Array VarEnv::getDefinedVariables() const {
512 Array ret = Array::Create();
514 NameValueTable::Iterator iter(&m_nvTable);
515 for (; iter.valid(); iter.next()) {
516 auto const sd = iter.curKey();
517 auto const tv = iter.curVal();
518 // Closures have an internal 0Closure variable
519 if (s_closure_var.equal(sd)) {
520 continue;
// Preserve ref-ness: referenced values go in with setWithRef, plain
// values with add.
522 if (tvAsCVarRef(tv).isReferenced()) {
523 ret.setWithRef(StrNR(sd).asString(), tvAsCVarRef(tv));
524 } else {
525 ret.add(StrNR(sd).asString(), tvAsCVarRef(tv));
529 // Make result independent of the hashtable implementation.
530 ArrayData* sorted = ret->escalateForSort(SORTFUNC_KSORT);
531 assert(sorted == ret.get() || sorted->hasExactlyOneRef());
// If escalation produced a new array, adopt it on scope exit so ret owns it.
532 SCOPE_EXIT {
533 if (sorted != ret.get()) {
534 ret = Array::attach(sorted);
537 sorted->ksort(0, true);
539 return ret;
542 TypedValue* VarEnv::getExtraArg(unsigned argInd) const {
543 return m_extraArgs->getExtraArg(argInd);
546 //=============================================================================
548 ExtraArgs::ExtraArgs() {}
549 ExtraArgs::~ExtraArgs() {}
551 void* ExtraArgs::allocMem(unsigned nargs) {
552 assert(nargs > 0);
553 return req::malloc(
554 sizeof(TypedValue) * nargs + sizeof(ExtraArgs),
555 type_scan::getIndexForMalloc<
556 ExtraArgs,
557 type_scan::Action::WithSuffix<TypedValue>
562 ExtraArgs* ExtraArgs::allocateCopy(TypedValue* args, unsigned nargs) {
563 void* mem = allocMem(nargs);
564 ExtraArgs* ea = new (mem) ExtraArgs();
567 * The stack grows downward, so the args in memory are "backward"; i.e. the
568 * leftmost (in PHP) extra arg is highest in memory.
570 std::reverse_copy(args, args + nargs, &ea->m_extraArgs[0]);
571 return ea;
574 ExtraArgs* ExtraArgs::allocateUninit(unsigned nargs) {
575 void* mem = ExtraArgs::allocMem(nargs);
576 return new (mem) ExtraArgs();
579 void ExtraArgs::deallocate(ExtraArgs* ea, unsigned nargs) {
580 assert(nargs > 0);
581 for (unsigned i = 0; i < nargs; ++i) {
582 tvRefcountedDecRef(ea->m_extraArgs + i);
584 ea->~ExtraArgs();
585 req::free(ea);
588 void ExtraArgs::deallocate(ActRec* ar) {
589 const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
590 deallocate(ar->getExtraArgs(), numExtra);
// Clones this ExtraArgs for a new ActRec, duplicating each extra arg
// (flattening where tvDupFlattenVars does so).
593 ExtraArgs* ExtraArgs::clone(ActRec* ar) const {
// NOTE(review): deallocate(ActRec*) computes the extra-arg count with
// numNonVariadicParams(), but this uses numParams() — confirm the
// difference is intentional for the cloning path.
594 const int numExtra = ar->numArgs() - ar->m_func->numParams();
595 auto ret = allocateUninit(numExtra);
596 for (int i = 0; i < numExtra; ++i) {
597 tvDupFlattenVars(&m_extraArgs[i], &ret->m_extraArgs[i]);
599 return ret;
602 TypedValue* ExtraArgs::getExtraArg(unsigned argInd) const {
603 return const_cast<TypedValue*>(&m_extraArgs[argInd]);
606 //=============================================================================
607 // Stack.
609 // Store actual stack elements array in a thread-local in order to amortize the
610 // cost of allocation.
611 struct StackElms {
612 ~StackElms() { flush(); }
// Lazily allocates the VM stack storage on first use. The allocation is
// both sized and aligned to EvalVMStackElms TypedValues (alignment equals
// total size), which the overflow check in Stack::wouldOverflow relies on.
613 TypedValue* elms() {
614 if (m_elms == nullptr) {
615 // RuntimeOption::EvalVMStackElms-sized and -aligned.
616 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
617 if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
618 throw std::runtime_error(
619 std::string("VM stack initialization failed: ") +
620 folly::errnoStr(errno).c_str());
// Pages aren't needed yet (let the kernel drop them), and bind the
// range to the current NUMA node for locality.
623 madvise(m_elms, algnSz, MADV_DONTNEED);
624 numa_bind_to(m_elms, algnSz, s_numaNode);
626 return m_elms;
// Frees the storage; safe to call when nothing is allocated.
628 void flush() {
629 if (m_elms != nullptr) {
630 free(m_elms);
631 m_elms = nullptr;
634 private:
635 TypedValue* m_elms{nullptr};
637 IMPLEMENT_THREAD_LOCAL(StackElms, t_se);
639 const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
640 // We reserve the bottom page of each stack for use as the surprise
641 // page, so the minimum useful stack size is the next power of two.
642 const uint32_t Stack::sMinStackElms =
643 2 * sSurprisePageSize / sizeof(TypedValue);
645 void Stack::ValidateStackSize() {
646 if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
647 throw std::runtime_error(folly::sformat(
648 "VM stack size of {:#x} is below the minimum of {:#x}",
649 RuntimeOption::EvalVMStackElms,
650 sMinStackElms
653 if (!folly::isPowTwo(RuntimeOption::EvalVMStackElms)) {
654 throw std::runtime_error(folly::sformat(
655 "VM stack size of {:#x} is not a power of 2",
656 RuntimeOption::EvalVMStackElms
661 Stack::Stack()
662 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
665 Stack::~Stack() {
666 requestExit();
// Per-request stack setup: grabs the thread-local element storage, positions
// m_top/m_base, and publishes the stack-limit-and-surprise word into RDS.
669 void Stack::requestInit() {
670 m_elms = t_se->elms();
671 // Burn one element of the stack, to satisfy the constraint that
672 // valid m_top values always have the same high-order (>
673 // log(RuntimeOption::EvalVMStackElms)) bits.
674 m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;
// Publish the limit past the surprise page (plus check padding) so stack
// checks and surprise flags share a single atomic word.
676 rds::header()->stackLimitAndSurprise.store(
677 reinterpret_cast<uintptr_t>(
678 reinterpret_cast<char*>(m_elms) + sSurprisePageSize +
679 kStackCheckPadding * sizeof(Cell)
681 std::memory_order_release
683 assert(!(rds::header()->stackLimitAndSurprise.load() & kSurpriseFlagMask));
685 // Because of the surprise page at the bottom of the stack we lose an
686 // additional 256 elements which must be taken into account when checking for
687 // overflow.
688 UNUSED size_t maxelms =
689 RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
690 assert(!wouldOverflow(maxelms - 1));
691 assert(wouldOverflow(maxelms));
694 void Stack::requestExit() {
695 m_elms = nullptr;
698 void flush_evaluation_stack() {
699 if (vmStack().isAllocated()) {
700 // For RPCRequestHandler threads, the ExecutionContext can stay
701 // alive across requests, but its always ok to kill it between
702 // requests, so do so now
703 RPCRequestHandler::cleanupState();
706 MM().flush();
708 if (!t_se.isNull()) {
709 t_se->flush();
711 rds::flush();
712 json_parser_flush_caches();
714 always_assert(MM().empty());
717 static std::string toStringElm(const TypedValue* tv) {
718 std::ostringstream os;
720 if (tv->m_type < kMinDataType || tv->m_type > kMaxDataType) {
721 os << " ??? type " << tv->m_type << "\n";
722 return os.str();
724 if (isRefcountedType(tv->m_type) &&
725 !tv->m_data.pcnt->checkCount()) {
726 // OK in the invoking frame when running a destructor.
727 os << " ??? inner_count " << tvGetCount(tv) << " ";
728 return os.str();
731 auto print_count = [&] {
732 if (tv->m_data.pcnt->isStatic()) {
733 os << ":c(static)";
734 } else if (tv->m_data.pcnt->isUncounted()) {
735 os << ":c(uncounted)";
736 } else {
737 os << ":c(" << tvGetCount(tv) << ")";
741 switch (tv->m_type) {
742 case KindOfRef:
743 os << "V:(";
744 os << "@" << tv->m_data.pref;
745 os << toStringElm(tv->m_data.pref->tv());
746 os << ")";
747 return os.str();
748 case KindOfUninit:
749 case KindOfNull:
750 case KindOfBoolean:
751 case KindOfInt64:
752 case KindOfDouble:
753 case KindOfPersistentString:
754 case KindOfString:
755 case KindOfPersistentVec:
756 case KindOfVec:
757 case KindOfPersistentDict:
758 case KindOfDict:
759 case KindOfPersistentKeyset:
760 case KindOfKeyset:
761 case KindOfPersistentArray:
762 case KindOfArray:
763 case KindOfObject:
764 case KindOfResource:
765 os << "C:";
766 break;
769 do {
770 switch (tv->m_type) {
771 case KindOfUninit:
772 os << "Uninit";
773 continue;
774 case KindOfNull:
775 os << "Null";
776 continue;
777 case KindOfBoolean:
778 os << (tv->m_data.num ? "True" : "False");
779 continue;
780 case KindOfInt64:
781 os << "0x" << std::hex << tv->m_data.num << std::dec;
782 continue;
783 case KindOfDouble:
784 os << tv->m_data.dbl;
785 continue;
786 case KindOfPersistentString:
787 case KindOfString:
789 int len = tv->m_data.pstr->size();
790 bool truncated = false;
791 if (len > 128) {
792 len = 128;
793 truncated = true;
795 os << tv->m_data.pstr;
796 print_count();
797 os << ":\""
798 << escapeStringForCPP(tv->m_data.pstr->data(), len)
799 << "\"" << (truncated ? "..." : "");
801 continue;
802 case KindOfPersistentVec:
803 case KindOfVec:
804 assert(tv->m_data.parr->isVecArray());
805 assert(tv->m_data.parr->checkCount());
806 os << tv->m_data.parr;
807 print_count();
808 os << ":Vec";
809 continue;
810 case KindOfPersistentDict:
811 case KindOfDict:
812 assert(tv->m_data.parr->isDict());
813 assert(tv->m_data.parr->checkCount());
814 os << tv->m_data.parr;
815 print_count();
816 os << ":Dict";
817 continue;
818 case KindOfPersistentKeyset:
819 case KindOfKeyset:
820 assert(tv->m_data.parr->isKeyset());
821 assert(tv->m_data.parr->checkCount());
822 os << tv->m_data.parr;
823 print_count();
824 os << ":Keyset";
825 continue;
826 case KindOfPersistentArray:
827 case KindOfArray:
828 assert(tv->m_data.parr->isPHPArray());
829 assert(tv->m_data.parr->checkCount());
830 os << tv->m_data.parr;
831 print_count();
832 os << ":Array";
833 continue;
834 case KindOfObject:
835 assert(tv->m_data.pobj->checkCount());
836 os << tv->m_data.pobj;
837 print_count();
838 os << ":Object("
839 << tv->m_data.pobj->getClassName().get()->data()
840 << ")";
841 continue;
842 case KindOfResource:
843 assert(tv->m_data.pres->checkCount());
844 os << tv->m_data.pres;
845 print_count();
846 os << ":Resource("
847 << tv->m_data.pres->data()->o_getClassName().get()->data()
848 << ")";
849 continue;
850 case KindOfRef:
851 break;
853 not_reached();
854 } while (0);
856 return os.str();
859 static std::string toStringIter(const Iter* it, bool itRef) {
860 if (itRef) return "I:MutableArray";
862 // TODO(#2458166): it might be a CufIter, but we're just lucky that
863 // the bit pattern for the CufIter is going to have a 0 in
864 // getIterType for now.
865 switch (it->arr().getIterType()) {
866 case ArrayIter::TypeUndefined:
867 return "I:Undefined";
868 case ArrayIter::TypeArray:
869 return "I:Array";
870 case ArrayIter::TypeIterator:
871 return "I:Iterator";
873 assert(false);
874 return "I:?";
878 * Return true if Offset o is inside the protected region of a fault
879 * funclet for iterId, otherwise false. itRef will be set to true if
880 * the iterator was initialized with MIterInit*, false if the iterator
881 * was initialized with IterInit*.
883 static bool checkIterScope(const Func* f, Offset o, Id iterId, bool& itRef) {
884 assert(o >= f->base() && o < f->past());
// Scan the func's EH table for an entry whose protected region covers o and
// whose iterator id matches; its m_itRef flag tells us which init form was
// used (MIterInit* vs IterInit*).
885 for (auto const& eh : f->ehtab()) {
886 if (eh.m_base <= o && o < eh.m_past &&
887 eh.m_iterId == iterId) {
888 itRef = eh.m_itRef;
889 return true;
892 return false;
895 static void toStringFrame(std::ostream& os, const ActRec* fp,
896 int offset, const TypedValue* ftop,
897 const std::string& prefix, bool isTop = true) {
898 assert(fp);
900 // Use depth-first recursion to output the most deeply nested stack frame
901 // first.
903 Offset prevPc = 0;
904 TypedValue* prevStackTop = nullptr;
905 ActRec* prevFp = g_context->getPrevVMState(fp, &prevPc, &prevStackTop);
906 if (prevFp != nullptr) {
907 toStringFrame(os, prevFp, prevPc, prevStackTop, prefix, false);
911 os << prefix;
912 const Func* func = fp->m_func;
913 assert(func);
914 func->validate();
915 std::string funcName(func->fullName()->data());
916 os << "{func:" << funcName
917 << ",soff:" << fp->m_soff
918 << ",this:0x"
919 << std::hex << (func->cls() && fp->hasThis() ? fp->getThis() : nullptr)
920 << std::dec << "}";
921 TypedValue* tv = (TypedValue*)fp;
922 tv--;
924 if (func->numLocals() > 0) {
925 // Don't print locals for parent frames on a Ret(C|V) since some of them
926 // may already be destructed.
927 if (isRet(func->unit()->getOp(offset)) && !isTop) {
928 os << "<locals destroyed>";
929 } else {
930 os << "<";
931 int n = func->numLocals();
932 for (int i = 0; i < n; i++, tv--) {
933 if (i > 0) {
934 os << " ";
936 os << toStringElm(tv);
938 os << ">";
942 if (func->numIterators() > 0) {
943 os << "|";
944 Iter* it = &((Iter*)&tv[1])[-1];
945 for (int i = 0; i < func->numIterators(); i++, it--) {
946 if (i > 0) {
947 os << " ";
949 bool itRef;
950 if (checkIterScope(func, offset, i, itRef)) {
951 os << toStringIter(it, itRef);
952 } else {
953 os << "I:Undefined";
956 os << "|";
959 // Ideally we'd like to display the contents of the class-ref slots here, but
960 // we have no metadata to tell us which ones are currently occupied and valid.
962 std::vector<std::string> stackElems;
963 visitStackElems(
964 fp, ftop, offset,
965 [&](const ActRec* ar, Offset) {
966 stackElems.push_back(
967 folly::format("{{func:{}}}", ar->m_func->fullName()->data()).str()
970 [&](const TypedValue* tv) {
971 stackElems.push_back(toStringElm(tv));
974 std::reverse(stackElems.begin(), stackElems.end());
975 os << ' ' << folly::join(' ', stackElems);
977 os << '\n';
980 std::string Stack::toString(const ActRec* fp, int offset,
981 const std::string prefix/* = "" */) const {
982 // The only way to figure out which stack elements are activation records is
983 // to follow the frame chain. However, the goal for each stack frame is to
984 // print stack fragments from deepest to shallowest -- a then b in the
985 // following example:
987 // {func:foo,soff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
988 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
990 // Use depth-first recursion to get the output order correct.
992 std::ostringstream os;
993 auto unit = fp->unit();
994 auto func = fp->func();
995 os << prefix << "=== Stack at "
996 << unit->filepath()->data() << ":"
997 << unit->getLineNumber(unit->offsetOf(vmpc()))
998 << " func " << func->fullName()->data() << " ===\n";
1000 toStringFrame(os, fp, offset, m_top, prefix);
1002 return os.str();
// Returns true if pushing numCells more cells would overflow the usable
// stack (the region above the surprise page). Works by masking m_top down
// to its offset within the aligned stack allocation, so no loads of limits
// are required — mirroring what the translator emits.
1005 bool Stack::wouldOverflow(int numCells) const {
1006 // The funny approach here is to validate the translator's assembly
1007 // technique. We've aligned and sized the stack so that the high order
1008 // bits of valid cells are all the same. In the translator, numCells
1009 // can be hardcoded, and m_top is wired into a register,
1010 // so the expression requires no loads.
1011 intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
1012 truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
1013 intptr_t diff = truncatedTop - numCells -
1014 sSurprisePageSize / sizeof(TypedValue);
1015 return diff < 0;
1018 TypedValue* Stack::anyFrameStackBase(const ActRec* fp) {
1019 return fp->resumed() ? Stack::resumableStackBase(fp)
1020 : Stack::frameStackBase(fp);
1023 TypedValue* Stack::frameStackBase(const ActRec* fp) {
1024 assert(!fp->resumed());
1025 return (TypedValue*)fp - fp->func()->numSlotsInFrame();
// Computes the stack base for a resumed frame fp (generator or async
// function), whose ActRec/locals/iterators live on the heap rather than the
// VM stack. Two cases, distinguished by whether fp has a saved frame pointer.
1028 TypedValue* Stack::resumableStackBase(const ActRec* fp) {
1029 assert(fp->resumed());
1030 auto sfp = fp->sfp();
1031 if (sfp) {
1032 // The non-reentrant case occurs when a non-async or async generator is
1033 // resumed via ContEnter or ContRaise opcode. These opcodes leave a single
1034 // value on the stack that becomes part of the generator's stack. So we
1035 // find the caller's FP, compensate for its locals and iterators, and then
1036 // we've found the base of the generator's stack.
1037 assert(fp->func()->isGenerator());
1039 // Since resumables are stored on the heap, we need to go back in the
1040 // callstack a bit to find the base of the stack. Unfortunately, due to
1041 // generator delegation, this can be pretty far back...
1042 while (sfp->func()->isGenerator()) {
1043 sfp = sfp->sfp();
1046 return (TypedValue*)sfp - sfp->func()->numSlotsInFrame();
1047 } else {
1048 // The reentrant case occurs when asio scheduler resumes an async function
1049 // or async generator. We simply use the top of stack of the previous VM
1050 // frame (since the ActRec, locals, and iters for this frame do not reside
1051 // on the VM stack).
1052 assert(fp->func()->isAsync());
1053 return g_context.getNoCheck()->m_nestedVMs.back().sp;
// Returns the defined local variables of frame fp as a PHP array. A null fp
// yields an empty array; frames with a materialized VarEnv delegate to it;
// otherwise named locals are gathered directly, skipping Uninit slots.
1057 Array getDefinedVariables(const ActRec* fp) {
1058 if (UNLIKELY(fp == nullptr)) return empty_array();
1060 if ((fp->func()->attrs() & AttrMayUseVV) && fp->hasVarEnv()) {
1061 return fp->m_varEnv->getDefinedVariables();
1063 auto const func = fp->m_func;
1064 auto const numLocals = func->numNamedLocals();
1065 ArrayInit ret(numLocals, ArrayInit::Map{});
1066 for (Id id = 0; id < numLocals; ++id) {
1067 TypedValue* ptv = frame_local(fp, id);
// Uninitialized locals are not "defined"; omit them.
1068 if (ptv->m_type == KindOfUninit) {
1069 continue;
1071 Variant name(func->localVarName(id), Variant::PersistentStrInit{});
1072 ret.add(name, tvAsVariant(ptv));
1074 return ret.toArray();
1077 NEVER_INLINE
1078 static void shuffleExtraStackArgs(ActRec* ar) {
1079 const Func* func = ar->m_func;
1080 assert(func);
1082 // the last (variadic) param is included in numParams (since it has a
1083 // name), but the arg in that slot should be included as the first
1084 // element of the variadic array
1085 const auto numArgs = ar->numArgs();
1086 const auto numVarArgs = numArgs - func->numNonVariadicParams();
1087 assert(numVarArgs > 0);
1089 const auto takesVariadicParam = func->hasVariadicCaptureParam();
1090 auto& stack = vmStack();
1091 if (func->attrs() & AttrMayUseVV) {
1092 auto const tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs;
1093 ar->setExtraArgs(ExtraArgs::allocateCopy(tvArgs, numVarArgs));
1094 if (takesVariadicParam) {
1095 auto varArgsArray =
1096 Array::attach(PackedArray::MakePacked(numVarArgs, tvArgs));
1097 // Incref the args (they're already referenced in extraArgs) but now
1098 // additionally referenced in varArgsArray ...
1099 auto tv = tvArgs; uint32_t i = 0;
1100 for (; i < numVarArgs; ++i, ++tv) { tvRefcountedIncRef(tv); }
1101 // ... and now remove them from the stack
1102 stack.ndiscard(numVarArgs);
1103 auto const ad = varArgsArray.detach();
1104 assert(ad->hasExactlyOneRef());
1105 stack.pushArrayNoRc(ad);
1106 // Before, for each arg: refcount = n + 1 (stack)
1107 // After, for each arg: refcount = n + 2 (ExtraArgs, varArgsArray)
1108 } else {
1109 // Discard the arguments from the stack; they were all moved
1110 // into the extra args so we don't decref.
1111 stack.ndiscard(numVarArgs);
1113 // leave ar->numArgs reflecting the actual number of args passed
1114 } else {
1115 assert(takesVariadicParam); // called only if extra args are used
1116 auto const tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs;
1117 auto varArgsArray =
1118 Array::attach(PackedArray::MakePacked(numVarArgs, tvArgs));
1119 // Discard the arguments from the stack; they were all moved into the
1120 // variadic args array so we don't need to decref the values.
1121 stack.ndiscard(numVarArgs);
1122 auto const ad = varArgsArray.detach();
1123 assert(ad->hasExactlyOneRef());
1124 stack.pushArrayNoRc(ad);
1125 assert(func->numParams() == (numArgs - numVarArgs + 1));
1126 ar->setNumArgs(func->numParams());
1130 static void shuffleMagicArgs(ActRec* ar) {
1131 assert(ar->magicDispatch());
1133 // We need to put this where the first argument is
1134 auto const invName = ar->clearMagicDispatch();
1135 int const nargs = ar->numArgs();
1137 // We need to make an array containing all the arguments passed by
1138 // the caller and put it where the second argument is.
1139 auto argArray = Array::attach(
1140 nargs ? PackedArray::MakePacked(
1141 nargs, reinterpret_cast<TypedValue*>(ar) - nargs)
1142 : staticEmptyArray()
1145 auto& stack = vmStack();
1146 // Remove the arguments from the stack; they were moved into the
1147 // array so we don't need to decref.
1148 stack.ndiscard(nargs);
1150 // Move invName to where the first argument belongs, no need
1151 // to incRef/decRef since we are transferring ownership
1152 stack.pushStringNoRc(invName);
1154 // Move argArray to where the second argument belongs. We've already
1155 // incReffed the array above so we don't need to do it here.
1156 stack.pushArrayNoRc(argArray.detach());
1158 ar->setNumArgs(2);
1159 ar->setVarEnv(nullptr);
1162 // This helper is meant to be called if an exception or invalidation takes
1163 // place in the process of function entry; the ActRec ar is on the stack
1164 // but is not (yet) the current (executing) frame and is followed by a
1165 // number of params
1166 static NEVER_INLINE void cleanupParamsAndActRec(Stack& stack,
1167 ActRec* ar,
1168 ExtraArgs* extraArgs,
1169 int* numParams) {
1170 assert(stack.top() + (numParams != nullptr ? (*numParams) :
1171 extraArgs != nullptr ? ar->m_func->numParams() :
1172 ar->numArgs())
1173 == (void*)ar);
1174 if (extraArgs) {
1175 const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
1176 ExtraArgs::deallocate(extraArgs, numExtra);
1178 while (stack.top() != (void*)ar) {
1179 stack.popTV();
1181 stack.popAR();
1184 static NEVER_INLINE void shuffleMagicArrayArgs(ActRec* ar, const Cell args,
1185 Stack& stack, int nregular) {
1186 assert(ar != nullptr && ar->magicDispatch());
1187 assert(!cellIsNull(&args));
1188 assert(nregular >= 0);
1189 assert((stack.top() + nregular) == (void*) ar);
1190 assert(isContainer(args));
1191 DEBUG_ONLY const Func* f = ar->m_func;
1192 assert(f &&
1193 (f->name()->isame(s___call.get()) ||
1194 f->name()->isame(s___callStatic.get())));
1196 // We'll need to make this the first argument
1197 auto const invName = ar->clearMagicDispatch();
1199 auto nargs = getContainerSize(args);
1201 if (UNLIKELY(0 == nargs)) {
1202 // We need to make an array containing all the arguments passed by
1203 // the caller and put it where the second argument is.
1204 auto argArray = Array::attach(
1205 nregular
1206 ? PackedArray::MakePacked(
1207 nregular, reinterpret_cast<TypedValue*>(ar) - nregular)
1208 : staticEmptyArray()
1211 // Remove the arguments from the stack; they were moved into the
1212 // array so we don't need to decref.
1213 stack.ndiscard(nregular);
1215 // Move invName to where the first argument belongs, no need
1216 // to incRef/decRef since we are transferring ownership
1217 assert(stack.top() == (void*) ar);
1218 stack.pushStringNoRc(invName);
1220 // Move argArray to where the second argument belongs. We've already
1221 // incReffed the array above so we don't need to do it here.
1222 stack.pushArrayNoRc(argArray.detach());
1223 } else {
1224 if (nregular == 0
1225 && isArrayType(args.m_type)
1226 && args.m_data.parr->isVectorData()) {
1227 assert(stack.top() == (void*) ar);
1228 stack.pushStringNoRc(invName);
1229 stack.pushArray(args.m_data.parr);
1230 } else {
1231 PackedArrayInit ai(nargs + nregular);
1232 for (int i = 0; i < nregular; ++i) {
1233 // appendWithRef bumps the refcount and splits if necessary, to
1234 // compensate for the upcoming pop from the stack
1235 ai.appendWithRef(tvAsVariant(stack.top()));
1236 stack.popTV();
1238 assert(stack.top() == (void*) ar);
1239 stack.pushStringNoRc(invName);
1240 for (ArrayIter iter(args); iter; ++iter) {
1241 ai.appendWithRef(iter.secondRefPlus());
1243 stack.pushArrayNoRc(ai.create());
1247 ar->setNumArgs(2);
1248 ar->setVarEnv(nullptr);
1251 // offset is the number of params already on the stack to which the
1252 // contents of args are to be added; for call_user_func_array, this is
1253 // always 0; for unpacked arguments, it may be greater if normally passed
1254 // params precede the unpack.
1255 bool prepareArrayArgs(ActRec* ar, const Cell args, Stack& stack,
1256 int nregular, bool doCufRefParamChecks,
1257 TypedValue* retval) {
1258 assert(!cellIsNull(&args));
1259 assert(nregular >= 0);
1260 assert((stack.top() + nregular) == (void*) ar);
1261 const Func* const f = ar->m_func;
1262 assert(f);
1264 assert(isContainer(args));
1265 int const nargs = nregular + getContainerSize(args);
1266 if (UNLIKELY(ar->magicDispatch())) {
1267 shuffleMagicArrayArgs(ar, args, stack, nregular);
1268 return true;
1271 int const nparams = f->numNonVariadicParams();
1272 int nextra_regular = std::max(nregular - nparams, 0);
1273 ArrayIter iter(args);
1274 if (LIKELY(nextra_regular == 0)) {
1275 for (int i = nregular; iter && (i < nparams); ++i, ++iter) {
1276 TypedValue* from = const_cast<TypedValue*>(
1277 iter.secondRefPlus().asTypedValue());
1278 TypedValue* to = stack.allocTV();
1279 if (LIKELY(!f->byRef(i))) {
1280 cellDup(*tvToCell(from), *to);
1281 } else if (LIKELY(from->m_type == KindOfRef &&
1282 from->m_data.pref->hasMultipleRefs())) {
1283 refDup(*from, *to);
1284 } else {
1285 if (doCufRefParamChecks && f->mustBeRef(i)) {
1286 try {
1287 raise_warning("Parameter %d to %s() expected to be a reference, "
1288 "value given", i + 1, f->fullName()->data());
1289 } catch (...) {
1290 // If the user error handler throws an exception, discard the
1291 // uninitialized value(s) at the top of the eval stack so that the
1292 // unwinder doesn't choke
1293 stack.discard();
1294 if (retval) { tvWriteNull(retval); }
1295 throw;
1297 if (skipCufOnInvalidParams) {
1298 stack.discard();
1299 cleanupParamsAndActRec(stack, ar, nullptr, &i);
1300 if (retval) { tvWriteNull(retval); }
1301 return false;
1304 cellDup(*tvToCell(from), *to);
1308 if (LIKELY(!iter)) {
1309 // argArray was exhausted, so there are no "extra" arguments but there
1310 // may be a deficit of non-variadic arguments, and the need to push an
1311 // empty array for the variadic argument ... that work is left to
1312 // prepareFuncEntry. Since the stack state is going to be considered
1313 // "trimmed" over there, we need to null the extraArgs/varEnv field if
1314 // the function could read it.
1315 ar->setNumArgs(nargs);
1316 ar->trashVarEnv();
1317 if (!debug || (ar->func()->attrs() & AttrMayUseVV)) {
1318 ar->setVarEnv(nullptr);
1320 return true;
1324 // there are "extra" arguments; passed as standard arguments prior to the
1325 // ... unpack operator and/or still remaining in argArray
1326 assert(nargs > nparams);
1327 assert(nextra_regular > 0 || !!iter);
1328 if (LIKELY(f->discardExtraArgs())) {
1329 if (UNLIKELY(nextra_regular > 0)) {
1330 // if unpacking, any regularly passed arguments on the stack
1331 // in excess of those expected by the function need to be discarded
1332 // in addition to the ones held in the arry
1333 do { stack.popTV(); } while (--nextra_regular);
1336 // the extra args are not used in the function; no reason to add them
1337 // to the stack
1338 ar->setNumArgs(f->numParams());
1339 return true;
1342 auto const hasVarParam = f->hasVariadicCaptureParam();
1343 auto const extra = nargs - nparams;
1344 if (f->attrs() & AttrMayUseVV) {
1345 ExtraArgs* extraArgs = ExtraArgs::allocateUninit(extra);
1346 PackedArrayInit ai(extra);
1347 if (UNLIKELY(nextra_regular > 0)) {
1348 // The arguments are pushed in order, so we should refer them by
1349 // index instead of taking the top, that would lead to reverse order.
1350 for (int i = nextra_regular - 1; i >= 0; --i) {
1351 TypedValue* to = extraArgs->getExtraArg(nextra_regular - i - 1);
1352 const TypedValue* from = stack.indTV(i);
1353 if (from->m_type == KindOfRef && from->m_data.pref->isReferenced()) {
1354 refCopy(*from, *to);
1355 } else {
1356 cellCopy(*tvToCell(from), *to);
1358 if (hasVarParam) {
1359 // appendWithRef bumps the refcount: this accounts for the fact
1360 // that the extra args values went from being present on the stack
1361 // to being in (both) ExtraArgs and the variadic args
1362 ai.appendWithRef(tvAsCVarRef(from));
1365 stack.ndiscard(nextra_regular);
1367 for (int i = nextra_regular; i < extra; ++i, ++iter) {
1368 TypedValue* to = extraArgs->getExtraArg(i);
1369 const TypedValue* from = iter.secondRefPlus().asTypedValue();
1370 tvDupWithRef(*from, *to);
1371 if (hasVarParam) {
1372 ai.appendWithRef(iter.secondRefPlus());
1375 assert(!iter); // iter should now be exhausted
1376 if (hasVarParam) {
1377 auto const ad = ai.create();
1378 assert(ad->hasExactlyOneRef());
1379 stack.pushArrayNoRc(ad);
1381 ar->setNumArgs(nargs);
1382 ar->setExtraArgs(extraArgs);
1383 } else {
1384 assert(hasVarParam);
1385 if (nparams == nregular &&
1386 isArrayType(args.m_type) &&
1387 args.m_data.parr->isVectorData()) {
1388 stack.pushArray(args.m_data.parr);
1389 } else {
1390 PackedArrayInit ai(extra);
1391 if (UNLIKELY(nextra_regular > 0)) {
1392 // The arguments are pushed in order, so we should refer them by
1393 // index instead of taking the top, that would lead to reverse order.
1394 for (int i = nextra_regular - 1; i >= 0; --i) {
1395 // appendWithRef bumps the refcount and splits if necessary,
1396 // to compensate for the upcoming pop from the stack
1397 ai.appendWithRef(tvAsVariant(stack.indTV(i)));
1399 for (int i = 0; i < nextra_regular; ++i) {
1400 stack.popTV();
1403 for (int i = nextra_regular; i < extra; ++i, ++iter) {
1404 // appendWithRef bumps the refcount to compensate for the
1405 // eventual decref of arrayArgs.
1406 ai.appendWithRef(iter.secondRefPlus());
1408 assert(!iter); // iter should now be exhausted
1409 auto const ad = ai.create();
1410 assert(ad->hasExactlyOneRef());
1411 stack.pushArrayNoRc(ad);
1413 ar->setNumArgs(f->numParams());
1415 return true;
1418 static void prepareFuncEntry(ActRec *ar, PC& pc, StackArgsState stk) {
1419 assert(!ar->resumed());
1420 const Func* func = ar->m_func;
1421 Offset firstDVInitializer = InvalidAbsoluteOffset;
1422 bool raiseMissingArgumentWarnings = false;
1423 const int nparams = func->numNonVariadicParams();
1424 auto& stack = vmStack();
1426 if (stk == StackArgsState::Trimmed &&
1427 (ar->func()->attrs() & AttrMayUseVV) &&
1428 ar->hasExtraArgs()) {
1429 assert(nparams < ar->numArgs());
1430 } else if (UNLIKELY(ar->magicDispatch())) {
1431 // shuffleMagicArgs deals with everything. no need for further
1432 // argument munging
1433 shuffleMagicArgs(ar);
1434 } else {
1435 int nargs = ar->numArgs();
1436 if (UNLIKELY(nargs > nparams)) {
1437 if (LIKELY(stk != StackArgsState::Trimmed && func->discardExtraArgs())) {
1438 // In the common case, the function won't use the extra arguments,
1439 // so act as if they were never passed (NOTE: this has the effect
1440 // of slightly misleading backtraces that don't reflect the
1441 // discarded args)
1442 for (int i = nparams; i < nargs; ++i) { stack.popTV(); }
1443 ar->setNumArgs(nparams);
1444 } else if (stk == StackArgsState::Trimmed) {
1445 assert(nargs == func->numParams());
1446 assert(((TypedValue*)ar - stack.top()) == func->numParams());
1447 } else {
1448 shuffleExtraStackArgs(ar);
1450 } else {
1451 if (nargs < nparams) {
1452 // Push uninitialized nulls for missing arguments. Some of them may
1453 // end up getting default-initialized, but regardless, we need to
1454 // make space for them on the stack.
1455 const Func::ParamInfoVec& paramInfo = func->params();
1456 for (int i = nargs; i < nparams; ++i) {
1457 stack.pushUninit();
1458 Offset dvInitializer = paramInfo[i].funcletOff;
1459 if (dvInitializer == InvalidAbsoluteOffset) {
1460 // We wait to raise warnings until after all the locals have been
1461 // initialized. This is important because things need to be in a
1462 // consistent state in case the user error handler throws.
1463 raiseMissingArgumentWarnings = true;
1464 } else if (firstDVInitializer == InvalidAbsoluteOffset) {
1465 // This is the first unpassed arg with a default value, so
1466 // this is where we'll need to jump to.
1467 firstDVInitializer = dvInitializer;
1471 if (UNLIKELY(func->hasVariadicCaptureParam())) {
1472 stack.pushArrayNoRc(staticEmptyArray());
1474 if (func->attrs() & AttrMayUseVV) {
1475 ar->setVarEnv(nullptr);
1480 int nlocals = func->numParams();
1481 if (UNLIKELY(func->isClosureBody())) {
1482 int nuse = init_closure(ar, stack.top());
1483 // init_closure doesn't move stack
1484 stack.nalloc(nuse);
1485 nlocals += nuse;
1486 func = ar->m_func;
1489 pushFrameSlots(func, nlocals);
1491 vmfp() = ar;
1492 if (firstDVInitializer != InvalidAbsoluteOffset) {
1493 pc = func->unit()->entry() + firstDVInitializer;
1494 } else {
1495 pc = func->getEntry();
1497 // cppext functions/methods have their own logic for raising
1498 // warnings for missing arguments, so we only need to do this work
1499 // for non-cppext functions/methods
1500 if (raiseMissingArgumentWarnings && !func->isCPPBuiltin()) {
1501 // need to sync vmpc() to pc for backtraces/re-entry
1502 vmpc() = pc;
1503 HPHP::jit::raiseMissingArgument(func, ar->numArgs());
1507 static void dispatch();
1509 void enterVMAtFunc(ActRec* enterFnAr, StackArgsState stk, VarEnv* varEnv) {
1510 assert(enterFnAr);
1511 assert(!enterFnAr->resumed());
1512 Stats::inc(Stats::VMEnter);
1514 const bool useJit = RID().getJit() && !RID().getJitFolding();
1515 const bool useJitPrologue = useJit && vmfp()
1516 && !enterFnAr->magicDispatch()
1517 && !varEnv
1518 && (stk != StackArgsState::Trimmed);
1519 // The jit prologues only know how to do limited amounts of work; cannot
1520 // be used for magic call/pseudo-main/extra-args already determined or
1521 // ... or if the stack args have been explicitly been prepared (e.g. via
1522 // entry as part of invoke func).
1524 if (LIKELY(useJitPrologue)) {
1525 const int np = enterFnAr->m_func->numNonVariadicParams();
1526 int na = enterFnAr->numArgs();
1527 if (na > np) na = np + 1;
1528 jit::TCA start = enterFnAr->m_func->getPrologue(na);
1529 jit::enterTCAtPrologue(enterFnAr, start);
1530 return;
1533 if (UNLIKELY(varEnv != nullptr)) {
1534 enterFnAr->setVarEnv(varEnv);
1535 assert(enterFnAr->func()->isPseudoMain());
1536 pushFrameSlots(enterFnAr->func());
1537 auto oldFp = vmfp();
1538 if (UNLIKELY(oldFp && oldFp->skipFrame())) {
1539 oldFp = g_context->getPrevVMStateSkipFrame(oldFp);
1541 varEnv->enterFP(oldFp, enterFnAr);
1542 vmfp() = enterFnAr;
1543 vmpc() = enterFnAr->func()->getEntry();
1544 } else {
1545 prepareFuncEntry(enterFnAr, vmpc(), stk);
1548 if (!EventHook::FunctionCall(enterFnAr, EventHook::NormalFunc)) return;
1549 checkStack(vmStack(), enterFnAr->m_func, 0);
1550 assert(vmfp()->func()->contains(vmpc()));
1552 if (useJit) {
1553 jit::TCA start = enterFnAr->m_func->getFuncBody();
1554 jit::enterTCAfterPrologue(start);
1555 } else {
1556 dispatch();
1560 void enterVMAtCurPC() {
1561 assert(vmfp());
1562 assert(vmpc());
1563 assert(vmfp()->func()->contains(vmpc()));
1564 Stats::inc(Stats::VMEnter);
1565 if (RID().getJit()) {
1566 jit::enterTC();
1567 } else {
1568 dispatch();
1573 * Helper for function entry, including pseudo-main entry.
1575 void pushFrameSlots(const Func* func, int nparams /*= 0*/) {
1576 // Push locals.
1577 for (int i = nparams; i < func->numLocals(); i++) {
1578 vmStack().pushUninit();
1580 // Push iterators.
1581 for (int i = 0; i < func->numIterators(); i++) {
1582 vmStack().allocI();
1584 vmStack().allocClsRefSlots(func->numClsRefSlots());
1587 void unwindPreventReturnToTC(ActRec* ar) {
1588 auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
1589 always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
1590 "preventReturnToTC({}): {} isn't in TC",
1591 ar, savedRip);
1593 if (isReturnHelper(savedRip)) return;
1595 auto& ustubs = jit::tc::ustubs();
1596 if (ar->resumed()) {
1597 // async functions use callToExit stub
1598 assert(ar->func()->isGenerator());
1599 ar->setJitReturn(ar->func()->isAsync()
1600 ? ustubs.asyncGenRetHelper : ustubs.genRetHelper);
1601 } else {
1602 ar->setJitReturn(ustubs.retHelper);
1606 void debuggerPreventReturnToTC(ActRec* ar) {
1607 auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
1608 always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
1609 "preventReturnToTC({}): {} isn't in TC",
1610 ar, savedRip);
1612 if (isReturnHelper(savedRip) || isDebuggerReturnHelper(savedRip)) return;
1614 // We're going to smash the return address. Before we do, save the catch
1615 // block attached to the call in a side table so the return helpers and
1616 // unwinder can find it when needed.
1617 jit::stashDebuggerCatch(ar);
1619 auto& ustubs = jit::tc::ustubs();
1620 if (ar->resumed()) {
1621 // async functions use callToExit stub
1622 assert(ar->func()->isGenerator());
1623 ar->setJitReturn(ar->func()->isAsync()
1624 ? ustubs.debuggerAsyncGenRetHelper : ustubs.debuggerGenRetHelper);
1625 } else {
1626 ar->setJitReturn(ustubs.debuggerRetHelper);
1630 // Walk the stack and find any return address to jitted code and bash it to the
1631 // appropriate RetFromInterpreted*Frame helper. This ensures that we don't
1632 // return into jitted code and gives the system the proper chance to interpret
1633 // blacklisted tracelets.
1634 void debuggerPreventReturnsToTC() {
1635 assert(isDebuggerAttached());
1636 if (!RuntimeOption::EvalJit) return;
1638 auto& ec = *g_context;
1639 for (auto ar = vmfp(); ar; ar = ec.getPrevVMState(ar)) {
1640 debuggerPreventReturnToTC(ar);
1644 static inline StringData* lookup_name(TypedValue* key) {
1645 return prepareKey(*key);
1648 static inline void lookup_var(ActRec* fp,
1649 StringData*& name,
1650 TypedValue* key,
1651 TypedValue*& val) {
1652 name = lookup_name(key);
1653 const Func* func = fp->m_func;
1654 Id id = func->lookupVarId(name);
1655 if (id != kInvalidId) {
1656 val = frame_local(fp, id);
1657 } else {
1658 assert(fp->func()->attrs() & AttrMayUseVV);
1659 if (fp->hasVarEnv()) {
1660 val = fp->m_varEnv->lookup(name);
1661 } else {
1662 val = nullptr;
1667 static inline void lookupd_var(ActRec* fp,
1668 StringData*& name,
1669 TypedValue* key,
1670 TypedValue*& val) {
1671 name = lookup_name(key);
1672 auto const func = fp->m_func;
1673 Id id = func->lookupVarId(name);
1674 if (id != kInvalidId) {
1675 val = frame_local(fp, id);
1676 } else {
1677 assert(func->attrs() & AttrMayUseVV);
1678 if (!fp->hasVarEnv()) {
1679 fp->setVarEnv(VarEnv::createLocal(fp));
1681 val = fp->m_varEnv->lookup(name);
1682 if (val == nullptr) {
1683 TypedValue tv;
1684 tvWriteNull(&tv);
1685 fp->m_varEnv->set(name, &tv);
1686 val = fp->m_varEnv->lookup(name);
1691 static inline void lookup_gbl(ActRec* fp,
1692 StringData*& name,
1693 TypedValue* key,
1694 TypedValue*& val) {
1695 name = lookup_name(key);
1696 assert(g_context->m_globalVarEnv);
1697 val = g_context->m_globalVarEnv->lookup(name);
1700 static inline void lookupd_gbl(ActRec* fp,
1701 StringData*& name,
1702 TypedValue* key,
1703 TypedValue*& val) {
1704 name = lookup_name(key);
1705 assert(g_context->m_globalVarEnv);
1706 VarEnv* varEnv = g_context->m_globalVarEnv;
1707 val = varEnv->lookup(name);
1708 if (val == nullptr) {
1709 TypedValue tv;
1710 tvWriteNull(&tv);
1711 varEnv->set(name, &tv);
1712 val = varEnv->lookup(name);
1716 static inline void lookup_sprop(ActRec* fp,
1717 Class* cls,
1718 StringData*& name,
1719 TypedValue* key,
1720 TypedValue*& val,
1721 bool& visible,
1722 bool& accessible) {
1723 name = lookup_name(key);
1724 auto const ctx = arGetContextClass(fp);
1726 auto const lookup = cls->getSProp(ctx, name);
1728 val = lookup.prop;
1729 visible = lookup.prop != nullptr;
1730 accessible = lookup.accessible;
1733 static inline Class* lookupClsRef(Cell* input) {
1734 Class* class_ = nullptr;
1735 if (isStringType(input->m_type)) {
1736 class_ = Unit::loadClass(input->m_data.pstr);
1737 if (class_ == nullptr) {
1738 raise_error(Strings::UNKNOWN_CLASS, input->m_data.pstr->data());
1740 } else if (input->m_type == KindOfObject) {
1741 class_ = input->m_data.pobj->getVMClass();
1742 } else {
1743 raise_error("Cls: Expected string or object");
1745 return class_;
1748 static UNUSED int innerCount(const TypedValue* tv) {
1749 if (isRefcountedType(tv->m_type)) {
1750 return tv->m_type == KindOfRef ? tv->m_data.pref->getRealCount() :
1751 tvGetCount(tv);
1753 return -1;
1756 static inline TypedValue* ratchetRefs(TypedValue* result, TypedValue& tvRef,
1757 TypedValue& tvRef2) {
1758 TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
1759 result, result->m_type, innerCount(result),
1760 &tvRef, tvRef.m_type, innerCount(&tvRef),
1761 &tvRef2, tvRef2.m_type, innerCount(&tvRef2));
1762 // Due to complications associated with ArrayAccess, it is possible to acquire
1763 // a reference as a side effect of vector operation processing. Such a
1764 // reference must be retained until after the next iteration is complete.
1765 // Therefore, move the reference from tvRef to tvRef2, so that the reference
1766 // will be released one iteration later. But only do this if tvRef was used in
1767 // this iteration, otherwise we may wipe out the last reference to something
1768 // that we need to stay alive until the next iteration.
1769 if (tvRef.m_type != KindOfUninit) {
1770 if (isRefcountedType(tvRef2.m_type)) {
1771 tvDecRef(&tvRef2);
1772 TRACE(5, "Ratchet: decref tvref2\n");
1773 tvWriteUninit(&tvRef2);
1776 memcpy(&tvRef2, &tvRef, sizeof(TypedValue));
1777 tvWriteUninit(&tvRef);
1778 // Update result to point to relocated reference. This can be done
1779 // unconditionally here because we maintain the invariant throughout that
1780 // either tvRef is KindOfUninit, or tvRef contains a valid object that
1781 // result points to.
1782 assert(result == &tvRef);
1783 return &tvRef2;
1786 assert(result != &tvRef);
1787 return result;
/*
 * One iop* function exists for every bytecode. They all take a single PC&
 * argument, which should be left pointing to the next bytecode to execute when
 * the instruction is complete. Most return void, though a few return a
 * jit::TCA. The ones that return a TCA return a non-nullptr value to indicate
 * that the caller must resume execution in the TC at the returned
 * address. This is used to maintain certain invariants about how we get into
 * and out of VM frames in jitted code; see comments on jitReturnPre() for more
 * details.
 */
1801 OPTBLD_INLINE void iopLowInvalid() {
1802 fprintf(stderr, "invalid bytecode executed\n");
1803 abort();
1806 OPTBLD_INLINE void iopHighInvalid() {
1807 fprintf(stderr, "invalid bytecode executed\n");
1808 abort();
1811 OPTBLD_INLINE void iopNop() {
1814 OPTBLD_INLINE void iopEntryNop() {
1817 OPTBLD_INLINE void iopDiscardClsRef(clsref_slot slot) {
1818 slot.take();
1821 OPTBLD_INLINE void iopPopC() {
1822 vmStack().popC();
1825 OPTBLD_INLINE void iopPopV() {
1826 vmStack().popV();
1829 OPTBLD_INLINE void iopPopR() {
1830 if (vmStack().topTV()->m_type != KindOfRef) {
1831 vmStack().popC();
1832 } else {
1833 vmStack().popV();
1837 OPTBLD_INLINE void iopPopU() {
1838 vmStack().popU();
1841 OPTBLD_INLINE void iopDup() {
1842 vmStack().dup();
1845 OPTBLD_INLINE void iopBox() {
1846 vmStack().box();
1849 OPTBLD_INLINE void iopUnbox() {
1850 vmStack().unbox();
1853 OPTBLD_INLINE void iopBoxR() {
1854 TypedValue* tv = vmStack().topTV();
1855 if (tv->m_type != KindOfRef) {
1856 tvBox(tv);
1860 OPTBLD_INLINE void iopBoxRNop() {
1861 assert(refIsPlausible(*vmStack().topTV()));
1864 OPTBLD_INLINE void iopUnboxR() {
1865 if (vmStack().topTV()->m_type == KindOfRef) {
1866 vmStack().unbox();
1870 OPTBLD_INLINE void iopUnboxRNop() {
1871 assert(cellIsPlausible(*vmStack().topTV()));
1874 OPTBLD_INLINE void iopRGetCNop() {
1877 OPTBLD_INLINE void iopCGetCUNop() {
1880 OPTBLD_INLINE void iopUGetCUNop() {
1883 OPTBLD_INLINE void iopNull() {
1884 vmStack().pushNull();
1887 OPTBLD_INLINE void iopNullUninit() {
1888 vmStack().pushNullUninit();
1891 OPTBLD_INLINE void iopTrue() {
1892 vmStack().pushBool(true);
1895 OPTBLD_INLINE void iopFalse() {
1896 vmStack().pushBool(false);
1899 OPTBLD_INLINE void iopFile() {
1900 auto s = vmfp()->m_func->unit()->filepath();
1901 vmStack().pushStaticString(s);
1904 OPTBLD_INLINE void iopDir() {
1905 auto s = vmfp()->m_func->unit()->dirpath();
1906 vmStack().pushStaticString(s);
1909 OPTBLD_INLINE void iopMethod() {
1910 auto s = vmfp()->m_func->fullName();
1911 vmStack().pushStaticString(s);
1914 OPTBLD_INLINE void iopClsRefName(clsref_slot slot) {
1915 auto const cls = slot.take();
1916 auto const name = cls->name();
1917 vmStack().pushStaticString(name);
1920 OPTBLD_INLINE void iopInt(int64_t imm) {
1921 vmStack().pushInt(imm);
1924 OPTBLD_INLINE void iopDouble(double imm) {
1925 vmStack().pushDouble(imm);
1928 OPTBLD_INLINE void iopString(const StringData* s) {
1929 vmStack().pushStaticString(s);
1932 OPTBLD_INLINE void iopArray(const ArrayData* a) {
1933 assert(a->isPHPArray());
1934 vmStack().pushStaticArray(a);
1937 OPTBLD_INLINE void iopDict(const ArrayData* a) {
1938 assert(a->isDict());
1939 vmStack().pushStaticDict(a);
1942 OPTBLD_INLINE void iopKeyset(const ArrayData* a) {
1943 assert(a->isKeyset());
1944 vmStack().pushStaticKeyset(a);
1947 OPTBLD_INLINE void iopVec(const ArrayData* a) {
1948 assert(a->isVecArray());
1949 vmStack().pushStaticVec(a);
1952 OPTBLD_INLINE void iopNewArray(intva_t capacity) {
1953 if (capacity == 0) {
1954 vmStack().pushArrayNoRc(staticEmptyArray());
1955 } else {
1956 vmStack().pushArrayNoRc(PackedArray::MakeReserve(capacity));
1960 OPTBLD_INLINE void iopNewMixedArray(intva_t capacity) {
1961 if (capacity == 0) {
1962 vmStack().pushArrayNoRc(staticEmptyArray());
1963 } else {
1964 vmStack().pushArrayNoRc(MixedArray::MakeReserveMixed(capacity));
1968 OPTBLD_INLINE void iopNewDictArray(intva_t capacity) {
1969 if (capacity == 0) {
1970 vmStack().pushDictNoRc(staticEmptyDictArray());
1971 } else {
1972 vmStack().pushDictNoRc(MixedArray::MakeReserveDict(capacity));
1976 OPTBLD_INLINE
1977 void iopNewLikeArrayL(local_var fr, intva_t capacity) {
1978 ArrayData* arr;
1979 if (LIKELY(isArrayType(fr->m_type))) {
1980 arr = MixedArray::MakeReserveLike(fr->m_data.parr, capacity);
1981 } else {
1982 if (capacity == 0) capacity = PackedArray::SmallSize;
1983 arr = PackedArray::MakeReserve(capacity);
1985 vmStack().pushArrayNoRc(arr);
1988 OPTBLD_INLINE void iopNewPackedArray(intva_t n) {
1989 // This constructor moves values, no inc/decref is necessary.
1990 auto* a = PackedArray::MakePacked(n, vmStack().topC());
1991 vmStack().ndiscard(n);
1992 vmStack().pushArrayNoRc(a);
1995 OPTBLD_INLINE void iopNewStructArray(int32_t n, imm_array<int32_t> ids) {
1996 assert(n > 0 && n <= MixedArray::MaxStructMakeSize);
1997 req::vector<const StringData*> names;
1998 names.reserve(n);
1999 auto unit = vmfp()->m_func->unit();
2000 for (size_t i = 0; i < n; ++i) {
2001 auto name = unit->lookupLitstrId(ids[i]);
2002 names.push_back(name);
2005 // This constructor moves values, no inc/decref is necessary.
2006 auto a = MixedArray::MakeStruct(
2008 names.data(),
2009 vmStack().topC()
2010 )->asArrayData();
2011 vmStack().ndiscard(n);
2012 vmStack().pushArrayNoRc(a);
2015 OPTBLD_INLINE void iopNewVecArray(intva_t n) {
2016 // This constructor moves values, no inc/decref is necessary.
2017 auto* a = PackedArray::MakeVec(n, vmStack().topC());
2018 vmStack().ndiscard(n);
2019 vmStack().pushVecNoRc(a);
2022 OPTBLD_INLINE void iopNewKeysetArray(intva_t n) {
2023 // This constructor moves values, no inc/decref is necessary.
2024 auto* a = SetArray::MakeSet(n, vmStack().topC());
2025 vmStack().ndiscard(n);
2026 vmStack().pushKeysetNoRc(a);
2029 OPTBLD_INLINE void iopAddElemC() {
2030 Cell* c1 = vmStack().topC();
2031 Cell* c2 = vmStack().indC(1);
2032 Cell* c3 = vmStack().indC(2);
2033 if (!isArrayType(c3->m_type) && !isDictType(c3->m_type)) {
2034 raise_error("AddElemC: $3 must be an array or dict");
2036 if (c2->m_type == KindOfInt64) {
2037 cellAsVariant(*c3).asArrRef().set(c2->m_data.num, tvAsCVarRef(c1));
2038 } else {
2039 cellAsVariant(*c3).asArrRef().set(tvAsCVarRef(c2), tvAsCVarRef(c1));
2041 vmStack().popC();
2042 vmStack().popC();
2045 OPTBLD_INLINE void iopAddElemV() {
2046 Ref* r1 = vmStack().topV();
2047 Cell* c2 = vmStack().indC(1);
2048 Cell* c3 = vmStack().indC(2);
2049 if (!isArrayType(c3->m_type) && !isDictType(c3->m_type)) {
2050 raise_error("AddElemV: $3 must be an array or dict");
2052 if (c2->m_type == KindOfInt64) {
2053 cellAsVariant(*c3).asArrRef().setRef(c2->m_data.num, tvAsVariant(r1));
2054 } else {
2055 cellAsVariant(*c3).asArrRef().setRef(tvAsCVarRef(c2), tvAsVariant(r1));
2057 vmStack().popV();
2058 vmStack().popC();
2061 OPTBLD_INLINE void iopAddNewElemC() {
2062 Cell* c1 = vmStack().topC();
2063 Cell* c2 = vmStack().indC(1);
2064 if (!isArrayType(c2->m_type)) {
2065 raise_error("AddNewElemC: $2 must be an array");
2067 cellAsVariant(*c2).asArrRef().append(tvAsCVarRef(c1));
2068 vmStack().popC();
2071 OPTBLD_INLINE void iopAddNewElemV() {
2072 Ref* r1 = vmStack().topV();
2073 Cell* c2 = vmStack().indC(1);
2074 if (!isArrayType(c2->m_type)) {
2075 raise_error("AddNewElemV: $2 must be an array");
2077 cellAsVariant(*c2).asArrRef().appendRef(tvAsVariant(r1));
2078 vmStack().popV();
2081 OPTBLD_INLINE void iopNewCol(intva_t type) {
2082 auto cType = static_cast<CollectionType>(type.n);
2083 // Incref the collection object during construction.
2084 auto obj = collections::alloc(cType);
2085 vmStack().pushObjectNoRc(obj);
2088 OPTBLD_INLINE void iopColFromArray(intva_t type) {
2089 auto const cType = static_cast<CollectionType>(type.n);
2090 auto const c1 = vmStack().topC();
2091 // This constructor reassociates the ArrayData with the collection, so no
2092 // inc/decref is needed for the array. The collection object itself is
2093 // increfed.
2094 auto obj = collections::alloc(cType, c1->m_data.parr);
2095 vmStack().discard();
2096 vmStack().pushObjectNoRc(obj);
2099 OPTBLD_INLINE void iopColAddNewElemC() {
2100 Cell* c1 = vmStack().topC();
2101 Cell* c2 = vmStack().indC(1);
2102 assert(c2->m_type == KindOfObject && c2->m_data.pobj->isCollection());
2103 collections::initElem(c2->m_data.pobj, c1);
2104 vmStack().popC();
2107 OPTBLD_INLINE void iopMapAddElemC() {
2108 Cell* c1 = vmStack().topC();
2109 Cell* c2 = vmStack().indC(1);
2110 Cell* c3 = vmStack().indC(2);
2111 assert(c3->m_type == KindOfObject && c3->m_data.pobj->isCollection());
2112 collections::initMapElem(c3->m_data.pobj, c2, c1);
2113 vmStack().popC();
2114 vmStack().popC();
2117 OPTBLD_INLINE void iopCns(const StringData* s) {
2118 auto const cns = Unit::loadCns(s);
2119 if (cns == nullptr) {
2120 raise_notice(Strings::UNDEFINED_CONSTANT, s->data(), s->data());
2121 vmStack().pushStaticString(s);
2122 return;
2124 auto const c1 = vmStack().allocC();
2125 cellDup(*cns, *c1);
2128 OPTBLD_INLINE void iopCnsE(const StringData* s) {
2129 auto const cns = Unit::loadCns(s);
2130 if (cns == nullptr) {
2131 raise_error("Undefined constant '%s'", s->data());
2133 auto const c1 = vmStack().allocC();
2134 cellDup(*cns, *c1);
2137 OPTBLD_INLINE void iopCnsU(const StringData* name, const StringData* fallback) {
2138 auto cns = Unit::loadCns(name);
2139 if (cns == nullptr) {
2140 cns = Unit::loadCns(fallback);
2141 if (cns == nullptr) {
2142 raise_notice(
2143 Strings::UNDEFINED_CONSTANT,
2144 fallback->data(),
2145 fallback->data()
2147 vmStack().pushStaticString(fallback);
2148 return;
2151 auto const c1 = vmStack().allocC();
2152 cellDup(*cns, *c1);
2155 OPTBLD_INLINE void iopDefCns(const StringData* s) {
2156 bool result = Unit::defCns(s, vmStack().topTV());
2157 vmStack().replaceTV<KindOfBoolean>(result);
2160 OPTBLD_INLINE void iopClsCns(const StringData* clsCnsName, clsref_slot slot) {
2161 auto const cls = slot.take();
2162 auto const clsCns = cls->clsCnsGet(clsCnsName);
2164 if (clsCns.m_type == KindOfUninit) {
2165 raise_error("Couldn't find constant %s::%s",
2166 cls->name()->data(), clsCnsName->data());
2169 cellDup(clsCns, *vmStack().allocTV());
2172 OPTBLD_INLINE void iopClsCnsD(const StringData* clsCnsName, Id classId) {
2173 const NamedEntityPair& classNamedEntity =
2174 vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
2175 auto const clsCns = g_context->lookupClsCns(classNamedEntity.second,
2176 classNamedEntity.first, clsCnsName);
2177 auto const c1 = vmStack().allocC();
2178 cellDup(clsCns, *c1);
2181 OPTBLD_FLT_INLINE void iopConcat() {
2182 auto const c1 = vmStack().topC();
2183 auto const c2 = vmStack().indC(1);
2184 auto const s2 = cellAsVariant(*c2).toString();
2185 auto const s1 = cellAsCVarRef(*c1).toString();
2186 cellAsVariant(*c2) = concat(s2, s1);
2187 assert(c2->m_data.pstr->checkCount());
2188 vmStack().popC();
2191 OPTBLD_INLINE void iopConcatN(intva_t n) {
2192 auto const c1 = vmStack().topC();
2193 auto const c2 = vmStack().indC(1);
2195 if (n == 2) {
2196 auto const s2 = cellAsVariant(*c2).toString();
2197 auto const s1 = cellAsCVarRef(*c1).toString();
2198 cellAsVariant(*c2) = concat(s2, s1);
2199 assert(c2->m_data.pstr->checkCount());
2200 } else if (n == 3) {
2201 auto const c3 = vmStack().indC(2);
2202 auto const s3 = cellAsVariant(*c3).toString();
2203 auto const s2 = cellAsCVarRef(*c2).toString();
2204 auto const s1 = cellAsCVarRef(*c1).toString();
2205 cellAsVariant(*c3) = concat3(s3, s2, s1);
2206 assert(c3->m_data.pstr->checkCount());
2207 } else {
2208 assert(n == 4);
2209 auto const c3 = vmStack().indC(2);
2210 auto const c4 = vmStack().indC(3);
2211 auto const s4 = cellAsVariant(*c4).toString();
2212 auto const s3 = cellAsCVarRef(*c3).toString();
2213 auto const s2 = cellAsCVarRef(*c2).toString();
2214 auto const s1 = cellAsCVarRef(*c1).toString();
2215 cellAsVariant(*c4) = concat4(s4, s3, s2, s1);
2216 assert(c4->m_data.pstr->checkCount());
2219 for (int i = 1; i < n; ++i) {
2220 vmStack().popC();
2224 OPTBLD_INLINE void iopNot() {
2225 Cell* c1 = vmStack().topC();
2226 cellAsVariant(*c1) = !cellAsVariant(*c1).toBoolean();
2229 template<class Fn>
2230 OPTBLD_INLINE void implCellBinOp(Fn fn) {
2231 auto const c1 = vmStack().topC();
2232 auto const c2 = vmStack().indC(1);
2233 auto const result = fn(*c2, *c1);
2234 tvRefcountedDecRef(c2);
2235 *c2 = result;
2236 vmStack().popC();
2239 template<class Fn>
2240 OPTBLD_INLINE void implCellBinOpBool(Fn fn) {
2241 auto const c1 = vmStack().topC();
2242 auto const c2 = vmStack().indC(1);
2243 bool const result = fn(*c2, *c1);
2244 tvRefcountedDecRef(c2);
2245 *c2 = make_tv<KindOfBoolean>(result);
2246 vmStack().popC();
2249 template<class Fn>
2250 OPTBLD_INLINE void implCellBinOpInt64(Fn fn) {
2251 auto const c1 = vmStack().topC();
2252 auto const c2 = vmStack().indC(1);
2253 auto const result = fn(*c2, *c1);
2254 tvRefcountedDecRef(c2);
2255 *c2 = make_tv<KindOfInt64>(result);
2256 vmStack().popC();
2259 OPTBLD_INLINE void iopAdd() {
2260 implCellBinOp(cellAdd);
2263 OPTBLD_INLINE void iopSub() {
2264 implCellBinOp(cellSub);
2267 OPTBLD_INLINE void iopMul() {
2268 implCellBinOp(cellMul);
2271 OPTBLD_INLINE void iopAddO() {
2272 implCellBinOp(cellAddO);
2275 OPTBLD_INLINE void iopSubO() {
2276 implCellBinOp(cellSubO);
2279 OPTBLD_INLINE void iopMulO() {
2280 implCellBinOp(cellMulO);
2283 OPTBLD_INLINE void iopDiv() {
2284 implCellBinOp(cellDiv);
2287 OPTBLD_INLINE void iopPow() {
2288 implCellBinOp(cellPow);
2291 OPTBLD_INLINE void iopMod() {
2292 implCellBinOp(cellMod);
2295 OPTBLD_INLINE void iopBitAnd() {
2296 implCellBinOp(cellBitAnd);
2299 OPTBLD_INLINE void iopBitOr() {
2300 implCellBinOp(cellBitOr);
2303 OPTBLD_INLINE void iopBitXor() {
2304 implCellBinOp(cellBitXor);
2307 OPTBLD_INLINE void iopXor() {
2308 implCellBinOpBool([&] (Cell c1, Cell c2) -> bool {
2309 return cellToBool(c1) ^ cellToBool(c2);
2313 OPTBLD_INLINE void iopSame() {
2314 implCellBinOpBool(cellSame);
2317 OPTBLD_INLINE void iopNSame() {
2318 implCellBinOpBool([&] (Cell c1, Cell c2) {
2319 return !cellSame(c1, c2);
2323 OPTBLD_INLINE void iopEq() {
2324 implCellBinOpBool([&] (Cell c1, Cell c2) {
2325 return cellEqual(c1, c2);
2329 OPTBLD_INLINE void iopNeq() {
2330 implCellBinOpBool([&] (Cell c1, Cell c2) {
2331 return !cellEqual(c1, c2);
2335 OPTBLD_INLINE void iopLt() {
2336 implCellBinOpBool([&] (Cell c1, Cell c2) {
2337 return cellLess(c1, c2);
2341 OPTBLD_INLINE void iopLte() {
2342 implCellBinOpBool(cellLessOrEqual);
2345 OPTBLD_INLINE void iopGt() {
2346 implCellBinOpBool([&] (Cell c1, Cell c2) {
2347 return cellGreater(c1, c2);
2351 OPTBLD_INLINE void iopGte() {
2352 implCellBinOpBool(cellGreaterOrEqual);
2355 OPTBLD_INLINE void iopCmp() {
2356 implCellBinOpInt64([&] (Cell c1, Cell c2) {
2357 return cellCompare(c1, c2);
2361 OPTBLD_INLINE void iopShl() {
2362 implCellBinOp(cellShl);
2365 OPTBLD_INLINE void iopShr() {
2366 implCellBinOp(cellShr);
2369 OPTBLD_INLINE void iopBitNot() {
2370 cellBitNot(*vmStack().topC());
2373 OPTBLD_INLINE void iopCastBool() {
2374 Cell* c1 = vmStack().topC();
2375 tvCastToBooleanInPlace(c1);
2378 OPTBLD_INLINE void iopCastInt() {
2379 Cell* c1 = vmStack().topC();
2380 tvCastToInt64InPlace(c1);
2383 OPTBLD_INLINE void iopCastDouble() {
2384 Cell* c1 = vmStack().topC();
2385 tvCastToDoubleInPlace(c1);
2388 OPTBLD_INLINE void iopCastString() {
2389 Cell* c1 = vmStack().topC();
2390 tvCastToStringInPlace(c1);
2393 OPTBLD_INLINE void iopCastArray() {
2394 Cell* c1 = vmStack().topC();
2395 tvCastToArrayInPlace(c1);
2398 OPTBLD_INLINE void iopCastObject() {
2399 Cell* c1 = vmStack().topC();
2400 tvCastToObjectInPlace(c1);
2403 OPTBLD_INLINE void iopCastDict() {
2404 Cell* c1 = vmStack().topC();
2405 tvCastToDictInPlace(c1);
2408 OPTBLD_INLINE void iopCastKeyset() {
2409 Cell* c1 = vmStack().topC();
2410 tvCastToKeysetInPlace(c1);
2413 OPTBLD_INLINE void iopCastVec() {
2414 Cell* c1 = vmStack().topC();
2415 tvCastToVecInPlace(c1);
2418 OPTBLD_INLINE bool cellInstanceOf(TypedValue* tv, const NamedEntity* ne) {
2419 assert(tv->m_type != KindOfRef);
2420 Class* cls = nullptr;
2421 switch (tv->m_type) {
2422 case KindOfUninit:
2423 case KindOfNull:
2424 case KindOfBoolean:
2425 case KindOfResource:
2426 return false;
2428 case KindOfInt64:
2429 cls = Unit::lookupClass(ne);
2430 return cls && interface_supports_int(cls->name());
2432 case KindOfDouble:
2433 cls = Unit::lookupClass(ne);
2434 return cls && interface_supports_double(cls->name());
2436 case KindOfPersistentString:
2437 case KindOfString:
2438 cls = Unit::lookupClass(ne);
2439 return cls && interface_supports_string(cls->name());
2441 case KindOfPersistentVec:
2442 case KindOfVec:
2443 cls = Unit::lookupClass(ne);
2444 return cls && interface_supports_vec(cls->name());
2446 case KindOfPersistentDict:
2447 case KindOfDict:
2448 cls = Unit::lookupClass(ne);
2449 return cls && interface_supports_dict(cls->name());
2451 case KindOfPersistentKeyset:
2452 case KindOfKeyset:
2453 cls = Unit::lookupClass(ne);
2454 return cls && interface_supports_keyset(cls->name());
2456 case KindOfPersistentArray:
2457 case KindOfArray:
2458 cls = Unit::lookupClass(ne);
2459 return cls && interface_supports_array(cls->name());
2461 case KindOfObject:
2462 cls = Unit::lookupClass(ne);
2463 return cls && tv->m_data.pobj->instanceof(cls);
2465 case KindOfRef:
2466 break;
2468 not_reached();
2471 ALWAYS_INLINE
2472 bool implInstanceOfHelper(const StringData* str1, Cell* c2) {
2473 const NamedEntity* rhs = NamedEntity::get(str1, false);
2474 // Because of other codepaths, an un-normalized name might enter the
2475 // table without a Class* so we need to check if it's there.
2476 if (LIKELY(rhs && rhs->getCachedClass() != nullptr)) {
2477 return cellInstanceOf(c2, rhs);
2479 return false;
2482 OPTBLD_INLINE void iopInstanceOf() {
2483 Cell* c1 = vmStack().topC(); // c2 instanceof c1
2484 Cell* c2 = vmStack().indC(1);
2485 bool r = false;
2486 if (isStringType(c1->m_type)) {
2487 r = implInstanceOfHelper(c1->m_data.pstr, c2);
2488 } else if (c1->m_type == KindOfObject) {
2489 if (c2->m_type == KindOfObject) {
2490 ObjectData* lhs = c2->m_data.pobj;
2491 ObjectData* rhs = c1->m_data.pobj;
2492 r = lhs->instanceof(rhs->getVMClass());
2494 } else {
2495 raise_error("Class name must be a valid object or a string");
2497 vmStack().popC();
2498 vmStack().replaceC<KindOfBoolean>(r);
2501 OPTBLD_INLINE void iopInstanceOfD(Id id) {
2502 const NamedEntity* ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
2503 Cell* c1 = vmStack().topC();
2504 bool r = cellInstanceOf(c1, ne);
2505 vmStack().replaceC<KindOfBoolean>(r);
2508 OPTBLD_INLINE void iopPrint() {
2509 Cell* c1 = vmStack().topC();
2510 g_context->write(cellAsVariant(*c1).toString());
2511 vmStack().replaceC<KindOfInt64>(1);
2514 OPTBLD_INLINE void iopClone() {
2515 TypedValue* tv = vmStack().topTV();
2516 if (tv->m_type != KindOfObject) {
2517 raise_error("clone called on non-object");
2519 ObjectData* obj = tv->m_data.pobj;
2520 const Class* class_ UNUSED = obj->getVMClass();
2521 ObjectData* newobj = obj->clone();
2522 vmStack().popTV();
2523 vmStack().pushNull();
2524 tv->m_type = KindOfObject;
2525 tv->m_data.pobj = newobj;
2528 OPTBLD_INLINE void iopVarEnvDynCall() {
2529 auto const func = vmfp()->func();
2530 assertx(func->accessesCallerFrame());
2531 assertx(func->dynCallTarget());
2532 assertx(!func->dynCallWrapper());
2533 raise_disallowed_dynamic_call(func->dynCallTarget());
2536 OPTBLD_INLINE void iopExit() {
2537 int exitCode = 0;
2538 Cell* c1 = vmStack().topC();
2539 if (c1->m_type == KindOfInt64) {
2540 exitCode = c1->m_data.num;
2541 } else {
2542 g_context->write(cellAsVariant(*c1).toString());
2544 vmStack().popC();
2545 vmStack().pushNull();
2546 throw ExitException(exitCode);
2549 OPTBLD_INLINE void iopFatal(FatalOp kind_char) {
2550 TypedValue* top = vmStack().topTV();
2551 std::string msg;
2552 if (isStringType(top->m_type)) {
2553 msg = top->m_data.pstr->data();
2554 } else {
2555 msg = "Fatal error message not a string";
2557 vmStack().popTV();
2559 switch (kind_char) {
2560 case FatalOp::RuntimeOmitFrame:
2561 raise_error_without_first_frame(msg);
2562 break;
2563 case FatalOp::Runtime:
2564 case FatalOp::Parse:
2565 raise_error(msg);
2566 break;
2570 OPTBLD_INLINE void jmpSurpriseCheck(Offset offset) {
2571 if (offset <= 0 && UNLIKELY(checkSurpriseFlags())) {
2572 auto const flags = handle_request_surprise();
2574 // Memory Threhsold callback should also be fired here
2575 if (flags & MemThresholdFlag) {
2576 EventHook::DoMemoryThresholdCallback();
2581 OPTBLD_INLINE void iopJmp(PC& pc, PC targetpc) {
2582 jmpSurpriseCheck(targetpc - pc);
2583 pc = targetpc;
2586 OPTBLD_INLINE void iopJmpNS(PC& pc, PC targetpc) {
2587 pc = targetpc;
2590 template<Op op>
2591 OPTBLD_INLINE void jmpOpImpl(PC& pc, PC targetpc) {
2592 static_assert(op == OpJmpZ || op == OpJmpNZ,
2593 "jmpOpImpl should only be used by JmpZ and JmpNZ");
2594 jmpSurpriseCheck(targetpc - pc);
2596 Cell* c1 = vmStack().topC();
2597 if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
2598 int64_t n = c1->m_data.num;
2599 vmStack().popX();
2600 if (op == OpJmpZ ? n == 0 : n != 0) pc = targetpc;
2601 } else {
2602 auto const cond = toBoolean(cellAsCVarRef(*c1));
2603 vmStack().popC();
2604 if (op == OpJmpZ ? !cond : cond) pc = targetpc;
2608 OPTBLD_INLINE void iopJmpZ(PC& pc, PC targetpc) {
2609 jmpOpImpl<OpJmpZ>(pc, targetpc);
2612 OPTBLD_INLINE void iopJmpNZ(PC& pc, PC targetpc) {
2613 jmpOpImpl<OpJmpNZ>(pc, targetpc);
2616 struct IterBreakElem {
2617 Id type, iter;
2620 OPTBLD_INLINE
2621 void iopIterBreak(PC& pc, PC targetpc, int32_t n,
2622 imm_array<IterBreakElem> vec) {
2623 assert(n > 0);
2624 for (auto i = 0; i < n; ++i) {
2625 auto e = vec[i];
2626 auto iter = frame_iter(vmfp(), e.iter);
2627 switch (e.type) {
2628 case KindOfIter: iter->free(); break;
2629 case KindOfMIter: iter->mfree(); break;
2630 case KindOfCIter: iter->cfree(); break;
2633 pc = targetpc;
2636 enum class SwitchMatch {
2637 NORMAL, // value was converted to an int: match normally
2638 NONZERO, // can't be converted to an int: match first nonzero case
2639 DEFAULT, // can't be converted to an int: match default case
2642 static SwitchMatch doubleCheck(double d, int64_t& out) {
2643 if (int64_t(d) == d) {
2644 out = d;
2645 return SwitchMatch::NORMAL;
2647 return SwitchMatch::DEFAULT;
// Integer switch. `Unbounded' switches (generators/continuations) index the
// jump table directly; bounded switches coerce the operand to an int and
// dispatch relative to `base', with the last two table entries reserved for
// the "first nonzero" and "default" targets.
OPTBLD_INLINE
void iopSwitch(PC origpc, PC& pc, SwitchKind kind, int64_t base, int veclen,
               imm_array<Offset> jmptab) {
  assert(veclen > 0);
  TypedValue* val = vmStack().topTV();
  if (kind == SwitchKind::Unbounded) {
    assert(val->m_type == KindOfInt64);
    // Continuation switch: no bounds checking needed
    int64_t label = val->m_data.num;
    vmStack().popX();
    assert(label >= 0 && label < veclen);
    pc = origpc + jmptab[label];
  } else {
    // Generic integer switch
    int64_t intval;
    SwitchMatch match = SwitchMatch::NORMAL;

    // Coerce the operand to an int (or decide on NONZERO/DEFAULT), and free
    // it. Done in an immediately-invoked lambda so each case can `return'.
    [&] {
      switch (val->m_type) {
        case KindOfUninit:
        case KindOfNull:
          intval = 0;
          return;

        case KindOfBoolean:
          // bool(true) is equal to any non-zero int, bool(false) == 0
          if (val->m_data.num) {
            match = SwitchMatch::NONZERO;
          } else {
            intval = 0;
          }
          return;

        case KindOfInt64:
          intval = val->m_data.num;
          return;

        case KindOfDouble:
          match = doubleCheck(val->m_data.dbl, intval);
          return;

        case KindOfPersistentString:
        case KindOfString: {
          double dval = 0.0;
          // allow_errors=1: numeric-string semantics for switch comparison.
          DataType t = val->m_data.pstr->isNumericWithVal(intval, dval, 1);
          switch (t) {
            case KindOfNull:
              intval = 0;
              break;
            case KindOfInt64:
              // do nothing (intval was filled in by isNumericWithVal)
              break;
            case KindOfDouble:
              match = doubleCheck(dval, intval);
              break;
            case KindOfUninit:
            case KindOfBoolean:
            case KindOfPersistentString:
            case KindOfString:
            case KindOfPersistentVec:
            case KindOfVec:
            case KindOfPersistentDict:
            case KindOfDict:
            case KindOfPersistentKeyset:
            case KindOfKeyset:
            case KindOfPersistentArray:
            case KindOfArray:
            case KindOfObject:
            case KindOfResource:
            case KindOfRef:
              not_reached();
          }
          tvRefcountedDecRef(val);
          return;
        }

        case KindOfVec:
          tvDecRef(val);
          // fallthrough: vec-like values always take the default case
        case KindOfPersistentVec:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfDict:
          tvDecRef(val);
          // fallthrough
        case KindOfPersistentDict:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfKeyset:
          tvDecRef(val);
          // fallthrough
        case KindOfPersistentKeyset:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfArray:
          tvDecRef(val);
          // fallthrough
        case KindOfPersistentArray:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfObject:
          intval = val->m_data.pobj->toInt64();
          tvDecRef(val);
          return;

        case KindOfResource:
          intval = val->m_data.pres->data()->o_toInt64();
          tvDecRef(val);
          return;

        case KindOfRef:
          break;
      }
      not_reached();
    }();
    // The operand was already decref'd above; just drop the slot.
    vmStack().discard();

    if (match != SwitchMatch::NORMAL ||
        intval < base || intval >= (base + veclen - 2)) {
      // Out of range or non-normal match: use the reserved targets at the
      // end of the table.
      switch (match) {
        case SwitchMatch::NORMAL:
        case SwitchMatch::DEFAULT:
          pc = origpc + jmptab[veclen - 1];
          break;

        case SwitchMatch::NONZERO:
          pc = origpc + jmptab[veclen - 2];
          break;
      }
    } else {
      pc = origpc + jmptab[intval - base];
    }
  }
}
2785 OPTBLD_INLINE
2786 void iopSSwitch(PC origpc, PC& pc, int32_t veclen,
2787 imm_array<StrVecItem> jmptab) {
2788 assert(veclen > 1);
2789 unsigned cases = veclen - 1; // the last vector item is the default case
2790 Cell* val = tvToCell(vmStack().topTV());
2791 Unit* u = vmfp()->m_func->unit();
2792 unsigned i;
2793 for (i = 0; i < cases; ++i) {
2794 auto item = jmptab[i];
2795 const StringData* str = u->lookupLitstrId(item.str);
2796 if (cellEqual(*val, str)) {
2797 pc = origpc + item.dest;
2798 vmStack().popC();
2799 return;
2802 // default case
2803 pc = origpc + jmptab[veclen - 1].dest;
2804 vmStack().popC();
2808 * jitReturnPre and jitReturnPost are used by RetC/V, CreateCont, NativeImpl,
2809 * Yield, and YieldK to perform a few tasks related to interpreting out of a
2810 * frame:
2812 * - If the current frame was entered in the TC and the jit is now off, we
2813 * throw a VMSwitchMode at the beginning of the bytecode to execute the
2814 * call's catch block (if present) before performing the return.
2815 * - If the current frame was entered in the TC and the jit is still on,
2816 * we wait until the end of the bytecode and throw a VMResumeTC, to return to
2817 * our translated caller rather than interpreting back into it.
2818 * - If the current frame was entered by the interpreter but was active when
2819 * the jit called MCGenerator::handleResume() (meaning it's the saved value
2820 * of %rbp in handleResume()'s stack frame), throw a VMResumeTC to reenter
2821 * handleResume(). This is necessary to update the value of %rbp in the TC
2822 * frame, so the unwinder doesn't read from a dead VM frame if something
2823 * throws from the interpreter later on.
namespace {

// Bookkeeping captured by jitReturnPre and consumed by jitReturnPost when
// returning from a frame that may have been entered from the TC.
struct JitReturn {
  uint64_t savedRip; // caller's return address; 0 if no TC resume is needed
  ActRec* fp;        // the frame being returned from
  ActRec* sfp;       // its caller's frame
  uint32_t soff;     // return offset into the caller
};
// Capture the info needed to return out of `fp' (see the block comment
// above for the TC/interpreter interaction this handles).
OPTBLD_INLINE JitReturn jitReturnPre(ActRec* fp) {
  auto savedRip = fp->m_savedRip;
  if (isReturnHelper(reinterpret_cast<void*>(savedRip))) {
    // This frame wasn't called from the TC, so it's ok to return using the
    // interpreter. callToExit is special: it's a return helper but we don't
    // treat it like one in here in order to simplify some things higher up in
    // the pipeline.
    if (reinterpret_cast<TCA>(savedRip) != jit::tc::ustubs().callToExit) {
      savedRip = 0;
    }
  } else if (!RID().getJit()) {
    // We entered this frame in the TC but the jit is now disabled, probably
    // because a debugger is attached. If we leave this frame in the
    // interpreter, we might be skipping a catch block that our caller expects
    // to be run. Switch to the interpreter before even beginning the
    // instruction.
    throw VMSwitchMode();
  }

  return {savedRip, fp, fp->sfp(), fp->m_soff};
}
// After the return has been performed, decide whether to resume in the TC
// (non-null return) or keep interpreting (nullptr).
OPTBLD_INLINE TCA jitReturnPost(JitReturn retInfo) {
  if (retInfo.savedRip) {
    if (isDebuggerReturnHelper(reinterpret_cast<void*>(retInfo.savedRip))) {
      // Our return address was smashed by the debugger. Do the work of the
      // debuggerRetHelper by setting some unwinder RDS info and resuming at
      // the appropriate catch trace.
      assert(jit::g_unwind_rds.isInit());
      jit::g_unwind_rds->debuggerReturnSP = vmsp();
      jit::g_unwind_rds->debuggerReturnOff = retInfo.soff;
      return jit::unstashDebuggerCatch(retInfo.fp);
    }

    // This frame was called by translated code so we can't interpret out of
    // it. Resume in the TC right after our caller. This situation most
    // commonly happens when we interpOne a RetC due to having a VarEnv or some
    // other weird case.
    return TCA(retInfo.savedRip);
  }

  if (!retInfo.sfp) {
    // If we don't have an sfp, we're returning from the first frame in this VM
    // nesting level. The vmJitCalledFrame() check below is only important if
    // we might throw before returning to the TC, which is guaranteed to not
    // happen in this situation.
    assert(vmfp() == nullptr);
    return nullptr;
  }

  // Consider a situation with a PHP function f() that calls another function
  // g(). If the call is interpreted, then we spend some time in the TC inside
  // g(), then eventually end in dispatchBB() (called by
  // MCGenerator::handleResume()) for g()'s RetC, the logic here kicks in.
  //
  // g()'s VM frame was in %rbp when the TC called handleResume(), so it's
  // saved somewhere in handleResume()'s stack frame. If we return out of that
  // frame and keep going in the interpreter, that saved %rbp will be pointing
  // to a garbage VM frame. This is a problem if something needs to throw an
  // exception up through handleResume() and the TC frames above it, since the
  // C++ unwinder will attempt to treat parts of the popped VM frame as
  // pointers and segfault.
  //
  // To avoid running with this dangling saved %rbp a few frames up, we
  // immediately throw an exception that is "caught" by the TC frame that
  // called handleResume(). We resume execution in the TC which reloads the new
  // vmfp() into %rbp, then handleResume() is called again, this time with a
  // live VM frame in %rbp.
  if (vmJitCalledFrame() == retInfo.fp) {
    FTRACE(1, "Returning from frame {}; resuming", vmJitCalledFrame());
    return jit::tc::ustubs().resumeHelper;
  }

  return nullptr;
}
// Shared implementation of RetC/RetV: tear down the current frame, publish
// the return value (plain slot, wait handle, or generator result depending
// on the frame kind), and transfer control back to the caller.
OPTBLD_INLINE TCA ret(PC& pc) {
  auto const jitReturn = jitReturnPre(vmfp());

  // Get the return value.
  TypedValue retval = *vmStack().topTV();
  vmStack().discard();

  // Free $this and local variables. Calls FunctionReturn hook. The return
  // value must be removed from the stack, or the unwinder would try to free it
  // if the hook throws---but the event hook routine decrefs the return value
  // in that case if necessary.
  frame_free_locals_inl(vmfp(), vmfp()->func()->numLocals(), &retval);

  // If in an eagerly executed async function, not called by
  // FCallAwait, wrap the return value into succeeded
  // StaticWaitHandle.
  if (UNLIKELY(vmfp()->mayNeedStaticWaitHandle() &&
               vmfp()->func()->isAsyncFunction())) {
    auto const& retvalCell = *tvAssertCell(&retval);
    // Heads up that we're assuming CreateSucceeded can't throw, or we won't
    // decref the return value. (It can't right now.)
    auto const waitHandle = c_StaticWaitHandle::CreateSucceeded(retvalCell);
    cellCopy(make_tv<KindOfObject>(waitHandle), retval);
  }

  if (isProfileRequest()) {
    profileIncrementFuncCounter(vmfp()->func());
  }

  // Grab caller info from ActRec.
  ActRec* sfp = vmfp()->sfp();
  Offset soff = vmfp()->m_soff;

  if (LIKELY(!vmfp()->resumed())) {
    // Free ActRec and store the return value.
    vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
    vmStack().ret();
    *vmStack().topTV() = retval;
    assert(vmStack().topTV() == vmfp()->retSlot());
    // In case we were called by a jitted FCallAwait, let it know
    // that we finished eagerly.
    vmStack().topTV()->m_aux.u_fcallAwaitFlag = 0;
  } else if (vmfp()->func()->isAsyncFunction()) {
    // Mark the async function as succeeded and store the return value.
    assert(!sfp);
    auto wh = frame_afwh(vmfp());
    wh->ret(retval);
    decRefObj(wh);
  } else if (vmfp()->func()->isAsyncGenerator()) {
    // Mark the async generator as finished.
    assert(isNullType(retval.m_type));
    auto const gen = frame_async_generator(vmfp());
    auto const eagerResult = gen->ret();
    if (eagerResult) {
      // Eager execution => return StaticWaitHandle.
      assert(sfp);
      vmStack().pushObjectNoRc(eagerResult);
    } else {
      // Resumed execution => return control to the scheduler.
      assert(!sfp);
    }
  } else if (vmfp()->func()->isNonAsyncGenerator()) {
    // Mark the generator as finished and store the return value.
    frame_generator(vmfp())->ret(retval);

    // Push return value of next()/send()/raise().
    vmStack().pushNull();
  } else {
    not_reached();
  }

  // Return control to the caller.
  vmfp() = sfp;
  pc = LIKELY(vmfp() != nullptr) ? vmfp()->func()->getEntry() + soff : nullptr;

  return jitReturnPost(jitReturn);
}
2990 OPTBLD_INLINE TCA iopRetC(PC& pc) {
2991 return ret(pc);
2994 OPTBLD_INLINE TCA iopRetV(PC& pc) {
2995 assert(!vmfp()->resumed());
2996 assert(!vmfp()->func()->isResumable());
2997 return ret(pc);
3000 OPTBLD_INLINE void iopUnwind() {
3001 assert(!g_context->m_faults.empty());
3002 assert(g_context->m_faults.back().m_raiseOffset != kInvalidOffset);
3003 throw VMPrepareUnwind();
3006 OPTBLD_INLINE void iopThrow() {
3007 Cell* c1 = vmStack().topC();
3008 if (c1->m_type != KindOfObject ||
3009 !c1->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
3010 raise_error("Exceptions must implement the Throwable interface.");
3012 auto obj = Object::attach(c1->m_data.pobj);
3013 vmStack().discard();
3014 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
3015 throw req::root<Object>(std::move(obj));
3018 OPTBLD_INLINE void iopClsRefGetC(clsref_slot slot) {
3019 auto const cell = vmStack().topC();
3020 auto const cls = lookupClsRef(cell);
3021 slot.put(cls);
3022 vmStack().popC();
3025 OPTBLD_INLINE void iopClsRefGetL(local_var fr, clsref_slot slot) {
3026 slot.put(lookupClsRef(tvToCell(fr.ptr)));
3029 static void raise_undefined_local(ActRec* fp, Id pind) {
3030 assert(pind < fp->m_func->numNamedLocals());
3031 raise_notice(Strings::UNDEFINED_VARIABLE,
3032 fp->m_func->localVarName(pind)->data());
3035 static inline void cgetl_inner_body(TypedValue* fr, TypedValue* to) {
3036 assert(fr->m_type != KindOfUninit);
3037 cellDup(*tvToCell(fr), *to);
3040 OPTBLD_INLINE void cgetl_body(ActRec* fp,
3041 TypedValue* fr,
3042 TypedValue* to,
3043 Id pind,
3044 bool warn) {
3045 if (fr->m_type == KindOfUninit) {
3046 // `to' is uninitialized here, so we need to tvWriteNull before
3047 // possibly causing stack unwinding.
3048 tvWriteNull(to);
3049 if (warn) raise_undefined_local(fp, pind);
3050 } else {
3051 cgetl_inner_body(fr, to);
3055 OPTBLD_FLT_INLINE void iopCGetL(local_var fr) {
3056 Cell* to = vmStack().allocC();
3057 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3060 OPTBLD_INLINE void iopCGetQuietL(local_var fr) {
3061 Cell* to = vmStack().allocC();
3062 cgetl_body(vmfp(), fr.ptr, to, fr.index, false);
3065 OPTBLD_INLINE void iopCUGetL(local_var fr) {
3066 auto to = vmStack().allocTV();
3067 tvDup(*tvToCell(fr.ptr), *to);
3070 OPTBLD_INLINE void iopCGetL2(local_var fr) {
3071 TypedValue* oldTop = vmStack().topTV();
3072 TypedValue* newTop = vmStack().allocTV();
3073 memcpy(newTop, oldTop, sizeof *newTop);
3074 Cell* to = oldTop;
3075 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3078 OPTBLD_INLINE void iopPushL(local_var locVal) {
3079 assert(locVal->m_type != KindOfUninit);
3080 assert(locVal->m_type != KindOfRef);
3081 TypedValue* dest = vmStack().allocTV();
3082 *dest = *locVal;
3083 locVal->m_type = KindOfUninit;
3086 OPTBLD_INLINE void cgetn_body(bool warn) {
3087 StringData* name;
3088 TypedValue* to = vmStack().topTV();
3089 TypedValue* fr = nullptr;
3090 lookup_var(vmfp(), name, to, fr);
3091 SCOPE_EXIT { decRefStr(name); };
3092 if (fr == nullptr || fr->m_type == KindOfUninit) {
3093 if (warn) raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3094 tvRefcountedDecRef(to);
3095 tvWriteNull(to);
3096 } else {
3097 tvRefcountedDecRef(to);
3098 cgetl_inner_body(fr, to);
3102 OPTBLD_INLINE void iopCGetN() { cgetn_body(true); }
3103 OPTBLD_INLINE void iopCGetQuietN() { cgetn_body(false); }
3105 OPTBLD_INLINE void cgetg_body(bool warn) {
3106 StringData* name;
3107 TypedValue* to = vmStack().topTV();
3108 TypedValue* fr = nullptr;
3109 lookup_gbl(vmfp(), name, to, fr);
3110 SCOPE_EXIT { decRefStr(name); };
3111 if (fr == nullptr) {
3112 if (warn && MoreWarnings) {
3113 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3115 tvRefcountedDecRef(to);
3116 tvWriteNull(to);
3117 } else if (fr->m_type == KindOfUninit) {
3118 if (warn) raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3119 tvRefcountedDecRef(to);
3120 tvWriteNull(to);
3121 } else {
3122 tvRefcountedDecRef(to);
3123 cgetl_inner_body(fr, to);
3127 OPTBLD_INLINE void iopCGetG() { cgetg_body(true); }
3128 OPTBLD_INLINE void iopCGetQuietG() { cgetg_body(false); }
// RAII helper for static-property instructions: resolves cls::$name from a
// class-ref slot plus the name cell on top of the stack, and cleans up the
// name cell on destruction.
struct SpropState {
  SpropState(Stack&, clsref_slot slot);
  ~SpropState();
  StringData* name;        // property name (from the stack cell)
  Class* cls;              // class taken from the slot
  TypedValue* output;      // stack cell to receive the result
  TypedValue* val;         // the property's storage, if found
  TypedValue oldNameCell;  // original name cell, decref'd in the dtor
  bool visible;
  bool accessible;
};

SpropState::SpropState(Stack& vmstack, clsref_slot slot) {
  cls = slot.take();
  // The name cell doubles as the output slot.
  auto nameCell = output = vmstack.topTV();
  lookup_sprop(vmfp(), cls, name, nameCell, val, visible, accessible);
  oldNameCell = *nameCell;
}

SpropState::~SpropState() {
  decRefStr(name);
  tvRefcountedDecRef(oldNameCell);
}
3154 template<bool box> void getS(clsref_slot slot) {
3155 SpropState ss(vmStack(), slot);
3156 if (!(ss.visible && ss.accessible)) {
3157 raise_error("Invalid static property access: %s::%s",
3158 ss.cls->name()->data(),
3159 ss.name->data());
3161 if (box) {
3162 if (ss.val->m_type != KindOfRef) {
3163 tvBox(ss.val);
3165 refDup(*ss.val, *ss.output);
3166 } else {
3167 cellDup(*tvToCell(ss.val), *ss.output);
3171 OPTBLD_INLINE void iopCGetS(clsref_slot slot) {
3172 getS<false>(slot);
3175 static inline MInstrState& initMState() {
3176 auto& mstate = vmMInstrState();
3177 tvWriteUninit(&mstate.tvRef);
3178 tvWriteUninit(&mstate.tvRef2);
3179 return mstate;
3182 using LookupNameFn = void (*)(ActRec*, StringData*&, TypedValue*, TypedValue*&);
3184 static inline void baseNGImpl(TypedValue* key, MOpMode mode,
3185 LookupNameFn lookupd, LookupNameFn lookup) {
3186 auto& mstate = initMState();
3187 StringData* name;
3188 TypedValue* baseVal;
3190 if (mode == MOpMode::Define) lookupd(vmfp(), name, key, baseVal);
3191 else lookup(vmfp(), name, key, baseVal);
3192 SCOPE_EXIT { decRefStr(name); };
3194 if (baseVal == nullptr) {
3195 assert(mode != MOpMode::Define);
3196 if (mode == MOpMode::Warn) {
3197 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3199 tvWriteNull(&mstate.tvTempBase);
3200 mstate.base = &mstate.tvTempBase;
3201 return;
3204 mstate.base = baseVal;
3207 static inline void baseNImpl(TypedValue* key, MOpMode mode) {
3208 baseNGImpl(key, mode, lookupd_var, lookup_var);
3211 OPTBLD_INLINE void iopBaseNC(intva_t idx, MOpMode mode) {
3212 baseNImpl(vmStack().indTV(idx), mode);
3215 OPTBLD_INLINE void iopBaseNL(local_var loc, MOpMode mode) {
3216 baseNImpl(tvToCell(loc.ptr), mode);
3219 OPTBLD_INLINE void iopFPassBaseNC(ActRec* ar, intva_t paramId, intva_t idx) {
3220 auto const mode = fpass_mode(ar, paramId);
3221 baseNImpl(vmStack().indTV(idx), mode);
3224 OPTBLD_INLINE void iopFPassBaseNL(ActRec* ar, intva_t paramId, local_var loc) {
3225 auto const mode = fpass_mode(ar, paramId);
3226 baseNImpl(tvToCell(loc.ptr), mode);
3229 static inline void baseGImpl(TypedValue* key, MOpMode mode) {
3230 baseNGImpl(key, mode, lookupd_gbl, lookup_gbl);
3233 OPTBLD_INLINE void iopBaseGC(intva_t idx, MOpMode mode) {
3234 baseGImpl(vmStack().indTV(idx), mode);
3237 OPTBLD_INLINE void iopBaseGL(local_var loc, MOpMode mode) {
3238 baseGImpl(tvToCell(loc.ptr), mode);
3241 OPTBLD_INLINE void iopFPassBaseGC(ActRec* ar, intva_t paramId, intva_t idx) {
3242 auto const mode = fpass_mode(ar, paramId);
3243 baseGImpl(vmStack().indTV(idx), mode);
3246 OPTBLD_INLINE void iopFPassBaseGL(ActRec* ar, intva_t paramId, local_var loc) {
3247 auto const mode = fpass_mode(ar, paramId);
3248 baseGImpl(tvToCell(loc.ptr), mode);
3251 static inline TypedValue* baseSImpl(TypedValue* key,
3252 clsref_slot slot) {
3253 auto const class_ = slot.take();
3255 auto const name = lookup_name(key);
3256 SCOPE_EXIT { decRefStr(name); };
3257 auto const lookup = class_->getSProp(arGetContextClass(vmfp()), name);
3258 if (!lookup.prop || !lookup.accessible) {
3259 raise_error("Invalid static property access: %s::%s",
3260 class_->name()->data(),
3261 name->data());
3264 return lookup.prop;
3267 OPTBLD_INLINE void iopBaseSC(intva_t keyIdx, clsref_slot slot) {
3268 auto& mstate = initMState();
3269 mstate.base = baseSImpl(vmStack().indTV(keyIdx), slot);
3272 OPTBLD_INLINE void iopBaseSL(local_var keyLoc, clsref_slot slot) {
3273 auto& mstate = initMState();
3274 mstate.base = baseSImpl(tvToCell(keyLoc.ptr), slot);
3277 OPTBLD_INLINE void baseLImpl(local_var loc, MOpMode mode) {
3278 auto& mstate = initMState();
3279 auto local = tvToCell(loc.ptr);
3280 if (mode == MOpMode::Warn && local->m_type == KindOfUninit) {
3281 raise_notice(Strings::UNDEFINED_VARIABLE,
3282 vmfp()->m_func->localVarName(loc.index)->data());
3284 mstate.base = local;
3287 OPTBLD_INLINE void iopBaseL(local_var loc, MOpMode mode) {
3288 baseLImpl(loc, mode);
3291 OPTBLD_INLINE void iopFPassBaseL(ActRec* ar, intva_t paramId, local_var loc) {
3292 auto mode = fpass_mode(ar, paramId);
3293 baseLImpl(loc, mode);
3296 OPTBLD_INLINE void iopBaseC(intva_t idx) {
3297 auto& mstate = initMState();
3298 mstate.base = vmStack().indTV(idx);
3301 OPTBLD_INLINE void iopBaseR(intva_t idx) {
3302 iopBaseC(idx);
3305 OPTBLD_INLINE void iopBaseH() {
3306 auto& mstate = initMState();
3307 mstate.tvTempBase = make_tv<KindOfObject>(vmfp()->getThis());
3308 mstate.base = &mstate.tvTempBase;
3311 static OPTBLD_INLINE void propDispatch(MOpMode mode, TypedValue key) {
3312 auto& mstate = vmMInstrState();
3313 auto ctx = arGetContextClass(vmfp());
3315 auto result = [&]{
3316 switch (mode) {
3317 case MOpMode::None:
3318 return Prop<MOpMode::None>(mstate.tvRef, ctx, mstate.base, key);
3319 case MOpMode::Warn:
3320 return Prop<MOpMode::Warn>(mstate.tvRef, ctx, mstate.base, key);
3321 case MOpMode::Define:
3322 return Prop<MOpMode::Define>(mstate.tvRef, ctx, mstate.base, key);
3323 case MOpMode::Unset:
3324 return Prop<MOpMode::Unset>(mstate.tvRef, ctx, mstate.base, key);
3326 always_assert(false);
3327 }();
3329 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3332 static OPTBLD_INLINE void propQDispatch(MOpMode mode, TypedValue key,
3333 bool reffy) {
3334 auto& mstate = vmMInstrState();
3335 auto ctx = arGetContextClass(vmfp());
3337 TypedValue* result;
3338 switch (mode) {
3339 case MOpMode::None:
3340 case MOpMode::Warn:
3341 assert(key.m_type == KindOfPersistentString);
3342 result = nullSafeProp(mstate.tvRef, ctx, mstate.base, key.m_data.pstr);
3343 break;
3345 case MOpMode::Define:
3346 if (reffy) raise_error(Strings::NULLSAFE_PROP_WRITE_ERROR);
3347 case MOpMode::Unset:
3348 always_assert(false);
3351 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3354 static OPTBLD_INLINE
3355 void elemDispatch(MOpMode mode, TypedValue key, bool reffy) {
3356 auto& mstate = vmMInstrState();
3358 auto result = [&] {
3359 switch (mode) {
3360 case MOpMode::None:
3361 // We're not actually going to modify it, so this is "safe".
3362 return const_cast<TypedValue*>(
3363 Elem<MOpMode::None>(mstate.tvRef, mstate.base, key)
3365 case MOpMode::Warn:
3366 // We're not actually going to modify it, so this is "safe".
3367 return const_cast<TypedValue*>(
3368 Elem<MOpMode::Warn>(mstate.tvRef, mstate.base, key)
3370 case MOpMode::Define:
3371 return reffy
3372 ? ElemD<MOpMode::Define, true>(mstate.tvRef, mstate.base, key)
3373 : ElemD<MOpMode::Define, false>(mstate.tvRef, mstate.base, key);
3374 case MOpMode::Unset:
3375 return ElemU(mstate.tvRef, mstate.base, key);
3377 always_assert(false);
3378 }();
3380 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3383 static inline TypedValue key_tv(MemberKey key) {
3384 switch (key.mcode) {
3385 case MW:
3386 return TypedValue{};
3387 case MEL: case MPL: {
3388 auto local = tvToCell(frame_local(vmfp(), key.iva));
3389 if (local->m_type == KindOfUninit) {
3390 raise_undefined_local(vmfp(), key.iva);
3391 return make_tv<KindOfNull>();
3393 return *local;
3395 case MEC: case MPC:
3396 return *vmStack().indTV(key.iva);
3397 case MEI:
3398 return make_tv<KindOfInt64>(key.int64);
3399 case MET: case MPT: case MQT:
3400 return make_tv<KindOfPersistentString>(key.litstr);
3402 not_reached();
3405 static OPTBLD_INLINE void dimDispatch(MOpMode mode, MemberKey mk,
3406 bool reffy) {
3407 auto const key = key_tv(mk);
3408 if (mk.mcode == MQT) {
3409 propQDispatch(mode, key, reffy);
3410 } else if (mcodeIsProp(mk.mcode)) {
3411 propDispatch(mode, key);
3412 } else if (mcodeIsElem(mk.mcode)) {
3413 elemDispatch(mode, key, reffy);
3414 } else {
3415 if (mode == MOpMode::Warn) raise_error("Cannot use [] for reading");
3417 auto& mstate = vmMInstrState();
3419 TypedValue* result;
3420 if (reffy) {
3421 if (UNLIKELY(isHackArrayType(mstate.base->m_type))) {
3422 throwRefInvalidArrayValueException(mstate.base->m_data.parr);
3424 result = NewElem<true>(mstate.tvRef, mstate.base);
3425 } else {
3426 result = NewElem<false>(mstate.tvRef, mstate.base);
3428 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3432 OPTBLD_INLINE void iopDim(MOpMode mode, MemberKey mk) {
3433 dimDispatch(mode, mk, false);
3436 OPTBLD_INLINE void iopFPassDim(ActRec* ar, intva_t paramId, MemberKey mk) {
3437 auto const mode = fpass_mode(ar, paramId);
3438 dimDispatch(mode, mk, false);
3441 static OPTBLD_INLINE void mFinal(MInstrState& mstate,
3442 int32_t nDiscard,
3443 folly::Optional<TypedValue> result) {
3444 auto& stack = vmStack();
3445 for (auto i = 0; i < nDiscard; ++i) stack.popTV();
3446 if (result) tvCopy(*result, *stack.allocTV());
3448 tvUnlikelyRefcountedDecRef(mstate.tvRef);
3449 tvUnlikelyRefcountedDecRef(mstate.tvRef2);
3452 static OPTBLD_INLINE
3453 void queryMImpl(MemberKey mk, int32_t nDiscard, QueryMOp op) {
3454 auto const key = key_tv(mk);
3455 auto& mstate = vmMInstrState();
3456 TypedValue result;
3457 switch (op) {
3458 case QueryMOp::CGet:
3459 case QueryMOp::CGetQuiet:
3460 dimDispatch(getQueryMOpMode(op), mk, false);
3461 tvDup(*tvToCell(mstate.base), result);
3462 break;
3464 case QueryMOp::Isset:
3465 case QueryMOp::Empty:
3466 result.m_type = KindOfBoolean;
3467 if (mcodeIsProp(mk.mcode)) {
3468 auto const ctx = arGetContextClass(vmfp());
3469 result.m_data.num = op == QueryMOp::Empty
3470 ? IssetEmptyProp<true>(ctx, mstate.base, key)
3471 : IssetEmptyProp<false>(ctx, mstate.base, key);
3472 } else {
3473 assert(mcodeIsElem(mk.mcode));
3474 result.m_data.num = op == QueryMOp::Empty
3475 ? IssetEmptyElem<true>(mstate.base, key)
3476 : IssetEmptyElem<false>(mstate.base, key);
3478 break;
3480 mFinal(mstate, nDiscard, result);
3483 OPTBLD_INLINE void iopQueryM(intva_t nDiscard, QueryMOp subop, MemberKey mk) {
3484 queryMImpl(mk, nDiscard, subop);
3487 static OPTBLD_INLINE void vGetMImpl(MemberKey mk, int32_t nDiscard) {
3488 auto& mstate = vmMInstrState();
3489 TypedValue result;
3490 dimDispatch(MOpMode::Define, mk, true);
3491 if (mstate.base->m_type != KindOfRef) tvBox(mstate.base);
3492 refDup(*mstate.base, result);
3493 mFinal(mstate, nDiscard, result);
3496 OPTBLD_INLINE void iopVGetM(intva_t nDiscard, MemberKey mk) {
3497 vGetMImpl(mk, nDiscard);
3500 OPTBLD_INLINE
3501 void iopFPassM(intva_t paramId, intva_t nDiscard, MemberKey mk) {
3502 auto ar = arFromSp(paramId + nDiscard);
3503 auto const mode = fpass_mode(ar, paramId);
3504 if (mode == MOpMode::Warn) {
3505 return queryMImpl(mk, nDiscard, QueryMOp::CGet);
3507 vGetMImpl(mk, nDiscard);
3510 OPTBLD_FLT_INLINE void iopSetM(intva_t nDiscard, MemberKey mk) {
3511 auto& mstate = vmMInstrState();
3512 auto const topC = vmStack().topC();
3514 if (mk.mcode == MW) {
3515 SetNewElem<true>(mstate.base, topC);
3516 } else {
3517 auto const key = key_tv(mk);
3518 if (mcodeIsElem(mk.mcode)) {
3519 auto const result = SetElem<true>(mstate.base, key, topC);
3520 if (result) {
3521 tvRefcountedDecRef(topC);
3522 topC->m_type = KindOfString;
3523 topC->m_data.pstr = result;
3525 } else {
3526 auto const ctx = arGetContextClass(vmfp());
3527 SetProp<true>(ctx, mstate.base, key, topC);
3531 auto const result = *topC;
3532 vmStack().discard();
3533 mFinal(mstate, nDiscard, result);
3536 OPTBLD_INLINE void iopIncDecM(intva_t nDiscard, IncDecOp subop, MemberKey mk) {
3537 auto const key = key_tv(mk);
3539 auto& mstate = vmMInstrState();
3540 Cell result;
3541 if (mcodeIsProp(mk.mcode)) {
3542 result = IncDecProp(arGetContextClass(vmfp()), subop, mstate.base, key);
3543 } else if (mcodeIsElem(mk.mcode)) {
3544 result = IncDecElem(subop, mstate.base, key);
3545 } else {
3546 result = IncDecNewElem(mstate.tvRef, subop, mstate.base);
3549 mFinal(mstate, nDiscard, result);
3552 OPTBLD_INLINE void iopSetOpM(intva_t nDiscard, SetOpOp subop, MemberKey mk) {
3553 auto const key = key_tv(mk);
3554 auto const rhs = vmStack().topC();
3556 auto& mstate = vmMInstrState();
3557 TypedValue* result;
3558 if (mcodeIsProp(mk.mcode)) {
3559 result = SetOpProp(mstate.tvRef, arGetContextClass(vmfp()), subop,
3560 mstate.base, key, rhs);
3561 } else if (mcodeIsElem(mk.mcode)) {
3562 result = SetOpElem(mstate.tvRef, subop, mstate.base, key, rhs);
3563 } else {
3564 result = SetOpNewElem(mstate.tvRef, subop, mstate.base, rhs);
3567 vmStack().popC();
3568 result = tvToCell(result);
3569 tvRefcountedIncRef(result);
3570 mFinal(mstate, nDiscard, *result);
3573 OPTBLD_INLINE void iopBindM(intva_t nDiscard, MemberKey mk) {
3574 auto& mstate = vmMInstrState();
3575 auto const rhs = *vmStack().topV();
3577 dimDispatch(MOpMode::Define, mk, true);
3578 tvBind(&rhs, mstate.base);
3580 vmStack().discard();
3581 mFinal(mstate, nDiscard, rhs);
3584 OPTBLD_INLINE void iopUnsetM(intva_t nDiscard, MemberKey mk) {
3585 auto const key = key_tv(mk);
3587 auto& mstate = vmMInstrState();
3588 if (mcodeIsProp(mk.mcode)) {
3589 UnsetProp(arGetContextClass(vmfp()), mstate.base, key);
3590 } else {
3591 assert(mcodeIsElem(mk.mcode));
3592 UnsetElem(mstate.base, key);
3595 mFinal(mstate, nDiscard, folly::none);
3598 static OPTBLD_INLINE void setWithRefImpl(TypedValue key, TypedValue* value) {
3599 auto& mstate = vmMInstrState();
3600 mstate.base = UNLIKELY(value->m_type == KindOfRef)
3601 ? ElemD<MOpMode::Define, true>(mstate.tvRef, mstate.base, key)
3602 : ElemD<MOpMode::Define, false>(mstate.tvRef, mstate.base, key);
3603 tvAsVariant(mstate.base).setWithRef(tvAsVariant(value));
3605 mFinal(mstate, 0, folly::none);
3608 OPTBLD_INLINE void iopSetWithRefLML(local_var kloc, local_var vloc) {
3609 auto const key = *tvToCell(kloc.ptr);
3610 setWithRefImpl(key, vloc.ptr);
3613 OPTBLD_INLINE void iopSetWithRefRML(local_var local) {
3614 auto const key = *tvToCell(local.ptr);
3615 setWithRefImpl(key, vmStack().topTV());
3616 vmStack().popTV();
3619 OPTBLD_INLINE void iopMemoGet(intva_t nDiscard,
3620 LocalRange locals) {
3621 assertx(vmfp()->m_func->isMemoizeWrapper());
3622 assertx(locals.first + locals.restCount < vmfp()->m_func->numLocals());
3623 auto mstate = vmMInstrState();
3624 auto const res = MixedArray::MemoGet(
3625 mstate.base,
3626 frame_local(vmfp(), locals.first),
3627 locals.restCount + 1
3629 mFinal(mstate, nDiscard, res);
3632 OPTBLD_INLINE void iopMemoSet(intva_t nDiscard,
3633 LocalRange locals) {
3634 assertx(vmfp()->m_func->isMemoizeWrapper());
3635 assertx(locals.first + locals.restCount < vmfp()->m_func->numLocals());
3636 auto const value = *vmStack().topC();
3637 auto mstate = vmMInstrState();
3638 MixedArray::MemoSet(
3639 mstate.base,
3640 frame_local(vmfp(), locals.first),
3641 locals.restCount + 1,
3642 value
3644 vmStack().discard();
3645 mFinal(mstate, nDiscard, value);
3648 static inline void vgetl_body(TypedValue* fr, TypedValue* to) {
3649 if (fr->m_type != KindOfRef) {
3650 tvBox(fr);
3652 refDup(*fr, *to);
3655 OPTBLD_INLINE void iopVGetL(local_var fr) {
3656 Ref* to = vmStack().allocV();
3657 vgetl_body(fr.ptr, to);
3660 OPTBLD_INLINE void iopVGetN() {
3661 StringData* name;
3662 TypedValue* to = vmStack().topTV();
3663 TypedValue* fr = nullptr;
3664 lookupd_var(vmfp(), name, to, fr);
3665 SCOPE_EXIT { decRefStr(name); };
3666 assert(fr != nullptr);
3667 tvRefcountedDecRef(to);
3668 vgetl_body(fr, to);
3671 OPTBLD_INLINE void iopVGetG() {
3672 StringData* name;
3673 TypedValue* to = vmStack().topTV();
3674 TypedValue* fr = nullptr;
3675 lookupd_gbl(vmfp(), name, to, fr);
3676 SCOPE_EXIT { decRefStr(name); };
3677 assert(fr != nullptr);
3678 tvRefcountedDecRef(to);
3679 vgetl_body(fr, to);
3682 OPTBLD_INLINE void iopVGetS(clsref_slot slot) {
3683 getS<true>(slot);
3686 OPTBLD_INLINE void iopIssetN() {
3687 StringData* name;
3688 TypedValue* tv1 = vmStack().topTV();
3689 TypedValue* tv = nullptr;
3690 bool e;
3691 lookup_var(vmfp(), name, tv1, tv);
3692 SCOPE_EXIT { decRefStr(name); };
3693 if (tv == nullptr) {
3694 e = false;
3695 } else {
3696 e = !cellIsNull(tvToCell(tv));
3698 vmStack().replaceC<KindOfBoolean>(e);
3701 OPTBLD_INLINE void iopIssetG() {
3702 StringData* name;
3703 TypedValue* tv1 = vmStack().topTV();
3704 TypedValue* tv = nullptr;
3705 bool e;
3706 lookup_gbl(vmfp(), name, tv1, tv);
3707 SCOPE_EXIT { decRefStr(name); };
3708 if (tv == nullptr) {
3709 e = false;
3710 } else {
3711 e = !cellIsNull(tvToCell(tv));
3713 vmStack().replaceC<KindOfBoolean>(e);
3716 OPTBLD_INLINE void iopIssetS(clsref_slot slot) {
3717 SpropState ss(vmStack(), slot);
3718 bool e;
3719 if (!(ss.visible && ss.accessible)) {
3720 e = false;
3721 } else {
3722 e = !cellIsNull(tvToCell(ss.val));
3724 ss.output->m_data.num = e;
3725 ss.output->m_type = KindOfBoolean;
3728 OPTBLD_FLT_INLINE void iopIssetL(local_var tv) {
3729 bool ret = is_not_null(tvAsCVarRef(tv.ptr));
3730 TypedValue* topTv = vmStack().allocTV();
3731 topTv->m_data.num = ret;
3732 topTv->m_type = KindOfBoolean;
3735 OPTBLD_INLINE static bool isTypeHelper(TypedValue* tv, IsTypeOp op) {
3736 switch (op) {
3737 case IsTypeOp::Uninit: return tv->m_type == KindOfUninit;
3738 case IsTypeOp::Null: return is_null(tvAsCVarRef(tv));
3739 case IsTypeOp::Bool: return is_bool(tvAsCVarRef(tv));
3740 case IsTypeOp::Int: return is_int(tvAsCVarRef(tv));
3741 case IsTypeOp::Dbl: return is_double(tvAsCVarRef(tv));
3742 case IsTypeOp::Arr: return is_array(tvAsCVarRef(tv));
3743 case IsTypeOp::Vec: return is_vec(tvAsCVarRef(tv));
3744 case IsTypeOp::Dict: return is_dict(tvAsCVarRef(tv));
3745 case IsTypeOp::Keyset: return is_keyset(tvAsCVarRef(tv));
3746 case IsTypeOp::Obj: return is_object(tvAsCVarRef(tv));
3747 case IsTypeOp::Str: return is_string(tvAsCVarRef(tv));
3748 case IsTypeOp::Scalar: return HHVM_FN(is_scalar)(tvAsCVarRef(tv));
3750 not_reached();
3753 OPTBLD_INLINE void iopIsTypeL(local_var loc, IsTypeOp op) {
3754 if (loc.ptr->m_type == KindOfUninit) {
3755 raise_undefined_local(vmfp(), loc.index);
3757 TypedValue* topTv = vmStack().allocTV();
3758 topTv->m_data.num = isTypeHelper(loc.ptr, op);
3759 topTv->m_type = KindOfBoolean;
3762 OPTBLD_INLINE void iopIsTypeC(IsTypeOp op) {
3763 TypedValue* topTv = vmStack().topTV();
3764 assert(topTv->m_type != KindOfRef);
3765 bool ret = isTypeHelper(topTv, op);
3766 tvRefcountedDecRef(topTv);
3767 topTv->m_data.num = ret;
3768 topTv->m_type = KindOfBoolean;
3771 OPTBLD_INLINE void iopIsUninit() {
3772 auto const* cell = vmStack().topC();
3773 assertx(cellIsPlausible(*cell));
3774 vmStack().pushBool(cell->m_type == KindOfUninit);
3777 OPTBLD_INLINE void iopMaybeMemoType() {
3778 assertx(vmfp()->m_func->isMemoizeWrapper());
3779 vmStack().replaceTV(make_tv<KindOfBoolean>(true));
3782 OPTBLD_INLINE void iopIsMemoType() {
3783 assertx(vmfp()->m_func->isMemoizeWrapper());
3784 vmStack().replaceTV(make_tv<KindOfBoolean>(false));
3787 OPTBLD_FLT_INLINE void iopAssertRATL(local_var loc, RepoAuthType rat) {
3788 if (debug) {
3789 auto const tv = *loc.ptr;
3790 auto const func = vmfp()->func();
3791 auto vm = &*g_context;
3792 always_assert_flog(
3793 tvMatchesRepoAuthType(tv, rat),
3794 "failed assert RATL on local {}: ${} in {}:{}, expected {}, got {}",
3795 loc.index,
3796 loc.index < func->numNamedLocals() ?
3797 func->localNames()[loc.index]->data() : "<unnamed>",
3798 vm->getContainingFileName()->data(),
3799 vm->getLine(),
3800 show(rat),
3801 toStringElm(&tv)
3806 OPTBLD_INLINE void iopAssertRATStk(intva_t stkSlot, RepoAuthType rat) {
3807 if (debug) {
3808 auto const tv = *vmStack().indTV(stkSlot);
3809 auto vm = &*g_context;
3810 always_assert_flog(
3811 tvMatchesRepoAuthType(tv, rat),
3812 "failed assert RATStk {} in {}:{}, expected {}, got {}",
3813 stkSlot.n,
3814 vm->getContainingFileName()->data(),
3815 vm->getLine(),
3816 show(rat),
3817 toStringElm(&tv)
3822 OPTBLD_INLINE void iopBreakTraceHint() {
3825 OPTBLD_INLINE void iopEmptyL(local_var loc) {
3826 bool e = !cellToBool(*tvToCell(loc.ptr));
3827 vmStack().pushBool(e);
3830 OPTBLD_INLINE void iopEmptyN() {
3831 StringData* name;
3832 TypedValue* tv1 = vmStack().topTV();
3833 TypedValue* tv = nullptr;
3834 bool e;
3835 lookup_var(vmfp(), name, tv1, tv);
3836 SCOPE_EXIT { decRefStr(name); };
3837 if (tv == nullptr) {
3838 e = true;
3839 } else {
3840 e = !cellToBool(*tvToCell(tv));
3842 vmStack().replaceC<KindOfBoolean>(e);
3845 OPTBLD_INLINE void iopEmptyG() {
3846 StringData* name;
3847 TypedValue* tv1 = vmStack().topTV();
3848 TypedValue* tv = nullptr;
3849 bool e;
3850 lookup_gbl(vmfp(), name, tv1, tv);
3851 SCOPE_EXIT { decRefStr(name); };
3852 if (tv == nullptr) {
3853 e = true;
3854 } else {
3855 e = !cellToBool(*tvToCell(tv));
3857 vmStack().replaceC<KindOfBoolean>(e);
3860 OPTBLD_INLINE void iopEmptyS(clsref_slot slot) {
3861 SpropState ss(vmStack(), slot);
3862 bool e;
3863 if (!(ss.visible && ss.accessible)) {
3864 e = true;
3865 } else {
3866 e = !cellToBool(*tvToCell(ss.val));
3868 ss.output->m_data.num = e;
3869 ss.output->m_type = KindOfBoolean;
3872 OPTBLD_INLINE void iopAKExists() {
3873 TypedValue* arr = vmStack().topTV();
3874 TypedValue* key = arr + 1;
3875 bool result = HHVM_FN(array_key_exists)(tvAsCVarRef(key), tvAsCVarRef(arr));
3876 vmStack().popTV();
3877 vmStack().replaceTV<KindOfBoolean>(result);
3880 OPTBLD_INLINE void iopGetMemoKeyL(local_var loc) {
3881 auto const func = vmfp()->m_func;
3882 assertx(func->isMemoizeWrapper());
3883 assertx(!func->anyByRef());
3885 // If this local corresponds to one of the function's parameters, and there's
3886 // a useful type-hint (which is being enforced), we can use a more efficient
3887 // memoization scheme based on the range of types we know this local can
3888 // have. This scheme needs to agree with HHBBC and the JIT.
3889 using MK = MemoKeyConstraint;
3890 auto const mkc = [&]{
3891 if (!RuntimeOption::RepoAuthoritative || !Repo::global().HardTypeHints) {
3892 return MK::None;
3894 if (loc.index >= func->numParams()) return MK::None;
3895 return memoKeyConstraintFromTC(func->params()[loc.index].typeConstraint);
3896 }();
3898 if (UNLIKELY(loc.ptr->m_type == KindOfUninit)) {
3899 tvWriteNull(loc.ptr);
3900 raise_undefined_local(vmfp(), loc.index);
3903 auto const key = [&](){
3904 switch (mkc) {
3905 case MK::Null:
3906 assertx(loc.ptr->m_type == KindOfNull);
3907 return make_tv<KindOfInt64>(0);
3908 case MK::Int:
3909 case MK::IntOrNull:
3910 if (loc.ptr->m_type == KindOfInt64) {
3911 return *loc.ptr;
3912 } else {
3913 assertx(loc.ptr->m_type == KindOfNull);
3914 return make_tv<KindOfPersistentString>(s_nullMemoKey.get());
3916 case MK::Bool:
3917 case MK::BoolOrNull:
3918 if (loc.ptr->m_type == KindOfBoolean) {
3919 return make_tv<KindOfInt64>(loc.ptr->m_data.num);
3920 } else {
3921 assertx(loc.ptr->m_type == KindOfNull);
3922 return make_tv<KindOfInt64>(2);
3924 case MK::Str:
3925 case MK::StrOrNull:
3926 if (tvIsString(loc.ptr)) {
3927 tvRefcountedIncRef(loc.ptr);
3928 return *loc.ptr;
3929 } else {
3930 assertx(loc.ptr->m_type == KindOfNull);
3931 return make_tv<KindOfInt64>(0);
3933 case MK::IntOrStr:
3934 assertx(tvIsString(loc.ptr) || loc.ptr->m_type == KindOfInt64);
3935 tvRefcountedIncRef(loc.ptr);
3936 return *loc.ptr;
3937 case MK::None:
3938 // Use the generic scheme, which is performed by
3939 // serialize_memoize_param.
3940 return HHVM_FN(serialize_memoize_param)(*loc.ptr);
3942 not_reached();
3943 }();
3945 cellCopy(key, *vmStack().allocC());
3948 namespace {
3949 const StaticString s_idx("hh\\idx");
3951 TypedValue genericIdx(TypedValue obj, TypedValue key, TypedValue def) {
3952 static auto func = Unit::loadFunc(s_idx.get());
3953 assertx(func != nullptr);
3954 TypedValue args[] = {
3955 obj,
3956 key,
3959 return g_context->invokeFuncFew(func, nullptr, nullptr, 3, &args[0]);
3963 OPTBLD_INLINE void iopIdx() {
3964 TypedValue* def = vmStack().topTV();
3965 TypedValue* key = vmStack().indTV(1);
3966 TypedValue* arr = vmStack().indTV(2);
3968 TypedValue result;
3969 if (isArrayLikeType(arr->m_type)) {
3970 result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3971 tvAsCVarRef(key),
3972 tvAsCVarRef(def));
3973 vmStack().popTV();
3974 } else if (isNullType(key->m_type)) {
3975 tvRefcountedDecRef(arr);
3976 *arr = *def;
3977 vmStack().ndiscard(2);
3978 return;
3979 } else if (!isStringType(arr->m_type) &&
3980 arr->m_type != KindOfObject) {
3981 result = *def;
3982 vmStack().discard();
3983 } else {
3984 result = genericIdx(*arr, *key, *def);
3985 vmStack().popTV();
3987 vmStack().popTV();
3988 tvRefcountedDecRef(arr);
3989 *arr = result;
3992 OPTBLD_INLINE void iopArrayIdx() {
3993 TypedValue* def = vmStack().topTV();
3994 TypedValue* key = vmStack().indTV(1);
3995 TypedValue* arr = vmStack().indTV(2);
3997 auto const result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3998 tvAsCVarRef(key),
3999 tvAsCVarRef(def));
4000 vmStack().popTV();
4001 vmStack().popTV();
4002 tvRefcountedDecRef(arr);
4003 *arr = result;
4006 OPTBLD_INLINE void iopSetL(local_var to) {
4007 assert(to.index < vmfp()->m_func->numLocals());
4008 Cell* fr = vmStack().topC();
4009 tvSet(*fr, *to);
4012 OPTBLD_INLINE void iopSetN() {
4013 StringData* name;
4014 Cell* fr = vmStack().topC();
4015 TypedValue* tv2 = vmStack().indTV(1);
4016 TypedValue* to = nullptr;
4017 lookupd_var(vmfp(), name, tv2, to);
4018 SCOPE_EXIT { decRefStr(name); };
4019 assert(to != nullptr);
4020 tvSet(*fr, *to);
4021 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
4022 vmStack().discard();
4025 OPTBLD_INLINE void iopSetG() {
4026 StringData* name;
4027 Cell* fr = vmStack().topC();
4028 TypedValue* tv2 = vmStack().indTV(1);
4029 TypedValue* to = nullptr;
4030 lookupd_gbl(vmfp(), name, tv2, to);
4031 SCOPE_EXIT { decRefStr(name); };
4032 assert(to != nullptr);
4033 tvSet(*fr, *to);
4034 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
4035 vmStack().discard();
4038 OPTBLD_INLINE void iopSetS(clsref_slot slot) {
4039 TypedValue* tv1 = vmStack().topTV();
4040 Class* cls = slot.take();
4041 TypedValue* propn = vmStack().indTV(1);
4042 TypedValue* output = propn;
4043 StringData* name;
4044 TypedValue* val;
4045 bool visible, accessible;
4046 lookup_sprop(vmfp(), cls, name, propn, val, visible, accessible);
4047 SCOPE_EXIT { decRefStr(name); };
4048 if (!(visible && accessible)) {
4049 raise_error("Invalid static property access: %s::%s",
4050 cls->name()->data(),
4051 name->data());
4053 tvSet(*tv1, *val);
4054 tvRefcountedDecRef(propn);
4055 memcpy(output, tv1, sizeof(TypedValue));
4056 vmStack().ndiscard(1);
4059 OPTBLD_INLINE void iopSetOpL(local_var loc, SetOpOp op) {
4060 Cell* fr = vmStack().topC();
4061 Cell* to = tvToCell(loc.ptr);
4062 setopBody(to, op, fr);
4063 tvRefcountedDecRef(fr);
4064 cellDup(*to, *fr);
4067 OPTBLD_INLINE void iopSetOpN(SetOpOp op) {
4068 Cell* fr = vmStack().topC();
4069 TypedValue* tv2 = vmStack().indTV(1);
4070 TypedValue* to = nullptr;
4071 // XXX We're probably not getting warnings totally correct here
4072 StringData* name;
4073 lookupd_var(vmfp(), name, tv2, to);
4074 SCOPE_EXIT { decRefStr(name); };
4075 assert(to != nullptr);
4076 setopBody(tvToCell(to), op, fr);
4077 tvRefcountedDecRef(fr);
4078 tvRefcountedDecRef(tv2);
4079 cellDup(*tvToCell(to), *tv2);
4080 vmStack().discard();
4083 OPTBLD_INLINE void iopSetOpG(SetOpOp op) {
4084 StringData* name;
4085 Cell* fr = vmStack().topC();
4086 TypedValue* tv2 = vmStack().indTV(1);
4087 TypedValue* to = nullptr;
4088 // XXX We're probably not getting warnings totally correct here
4089 lookupd_gbl(vmfp(), name, tv2, to);
4090 SCOPE_EXIT { decRefStr(name); };
4091 assert(to != nullptr);
4092 setopBody(tvToCell(to), op, fr);
4093 tvRefcountedDecRef(fr);
4094 tvRefcountedDecRef(tv2);
4095 cellDup(*tvToCell(to), *tv2);
4096 vmStack().discard();
4099 OPTBLD_INLINE void iopSetOpS(SetOpOp op, clsref_slot slot) {
4100 Cell* fr = vmStack().topC();
4101 Class* cls = slot.take();
4102 TypedValue* propn = vmStack().indTV(1);
4103 TypedValue* output = propn;
4104 StringData* name;
4105 TypedValue* val;
4106 bool visible, accessible;
4107 lookup_sprop(vmfp(), cls, name, propn, val, visible, accessible);
4108 SCOPE_EXIT { decRefStr(name); };
4109 if (!(visible && accessible)) {
4110 raise_error("Invalid static property access: %s::%s",
4111 cls->name()->data(),
4112 name->data());
4114 setopBody(tvToCell(val), op, fr);
4115 tvRefcountedDecRef(propn);
4116 tvRefcountedDecRef(fr);
4117 cellDup(*tvToCell(val), *output);
4118 vmStack().ndiscard(1);
4121 OPTBLD_INLINE void iopIncDecL(local_var fr, IncDecOp op) {
4122 TypedValue* to = vmStack().allocTV();
4123 tvWriteUninit(to);
4124 if (UNLIKELY(fr.ptr->m_type == KindOfUninit)) {
4125 raise_undefined_local(vmfp(), fr.index);
4126 tvWriteNull(fr.ptr);
4127 } else {
4128 fr.ptr = tvToCell(fr.ptr);
4130 cellCopy(IncDecBody(op, fr.ptr), *to);
4133 OPTBLD_INLINE void iopIncDecN(IncDecOp op) {
4134 StringData* name;
4135 TypedValue* nameCell = vmStack().topTV();
4136 TypedValue* local = nullptr;
4137 lookupd_var(vmfp(), name, nameCell, local);
4138 auto oldNameCell = *nameCell;
4139 SCOPE_EXIT {
4140 decRefStr(name);
4141 tvRefcountedDecRef(oldNameCell);
4143 assert(local != nullptr);
4144 cellCopy(IncDecBody(op, tvToCell(local)), *nameCell);
4147 OPTBLD_INLINE void iopIncDecG(IncDecOp op) {
4148 StringData* name;
4149 TypedValue* nameCell = vmStack().topTV();
4150 TypedValue* gbl = nullptr;
4151 lookupd_gbl(vmfp(), name, nameCell, gbl);
4152 auto oldNameCell = *nameCell;
4153 SCOPE_EXIT {
4154 decRefStr(name);
4155 tvRefcountedDecRef(oldNameCell);
4157 assert(gbl != nullptr);
4158 cellCopy(IncDecBody(op, tvToCell(gbl)), *nameCell);
4161 OPTBLD_INLINE void iopIncDecS(IncDecOp op, clsref_slot slot) {
4162 SpropState ss(vmStack(), slot);
4163 if (!(ss.visible && ss.accessible)) {
4164 raise_error("Invalid static property access: %s::%s",
4165 ss.cls->name()->data(),
4166 ss.name->data());
4168 cellCopy(IncDecBody(op, tvToCell(ss.val)), *ss.output);
4171 OPTBLD_INLINE void iopBindL(local_var to) {
4172 Ref* fr = vmStack().topV();
4173 tvBind(fr, to.ptr);
4176 OPTBLD_INLINE void iopBindN() {
4177 StringData* name;
4178 TypedValue* fr = vmStack().topTV();
4179 TypedValue* nameTV = vmStack().indTV(1);
4180 TypedValue* to = nullptr;
4181 lookupd_var(vmfp(), name, nameTV, to);
4182 SCOPE_EXIT { decRefStr(name); };
4183 assert(to != nullptr);
4184 tvBind(fr, to);
4185 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
4186 vmStack().discard();
4189 OPTBLD_INLINE void iopBindG() {
4190 StringData* name;
4191 TypedValue* fr = vmStack().topTV();
4192 TypedValue* nameTV = vmStack().indTV(1);
4193 TypedValue* to = nullptr;
4194 lookupd_gbl(vmfp(), name, nameTV, to);
4195 SCOPE_EXIT { decRefStr(name); };
4196 assert(to != nullptr);
4197 tvBind(fr, to);
4198 memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
4199 vmStack().discard();
4202 OPTBLD_INLINE void iopBindS(clsref_slot slot) {
4203 TypedValue* fr = vmStack().topTV();
4204 Class* cls = slot.take();
4205 TypedValue* propn = vmStack().indTV(1);
4206 TypedValue* output = propn;
4207 StringData* name;
4208 TypedValue* val;
4209 bool visible, accessible;
4210 lookup_sprop(vmfp(), cls, name, propn, val, visible, accessible);
4211 SCOPE_EXIT { decRefStr(name); };
4212 if (!(visible && accessible)) {
4213 raise_error("Invalid static property access: %s::%s",
4214 cls->name()->data(),
4215 name->data());
4217 tvBind(fr, val);
4218 tvRefcountedDecRef(propn);
4219 memcpy(output, fr, sizeof(TypedValue));
4220 vmStack().ndiscard(1);
4223 OPTBLD_INLINE void iopUnsetL(local_var loc) {
4224 tvUnset(loc.ptr);
4227 OPTBLD_INLINE void iopUnsetN() {
4228 StringData* name;
4229 TypedValue* tv1 = vmStack().topTV();
4230 TypedValue* tv = nullptr;
4231 lookup_var(vmfp(), name, tv1, tv);
4232 SCOPE_EXIT { decRefStr(name); };
4233 if (tv != nullptr) {
4234 tvUnset(tv);
4236 vmStack().popC();
4239 OPTBLD_INLINE void iopUnsetG() {
4240 TypedValue* tv1 = vmStack().topTV();
4241 StringData* name = lookup_name(tv1);
4242 SCOPE_EXIT { decRefStr(name); };
4243 VarEnv* varEnv = g_context->m_globalVarEnv;
4244 assert(varEnv != nullptr);
4245 varEnv->unset(name);
4246 vmStack().popC();
4249 OPTBLD_INLINE ActRec* fPushFuncImpl(const Func* func, int numArgs) {
4250 DEBUGGER_IF(phpBreakpointEnabled(func->name()->data()));
4251 ActRec* ar = vmStack().allocA();
4252 ar->m_func = func;
4253 ar->initNumArgs(numArgs);
4254 ar->trashVarEnv();
4255 setTypesFlag(vmfp(), ar);
4256 return ar;
// FPushFunc: push an ActRec for a callable taken from the top of the stack.
// Accepts an object (closure/functor via __invoke), an array callable, or a
// string function name; anything else is a fatal error.
4259 OPTBLD_INLINE void iopFPushFunc(intva_t numArgs) {
4260   Cell* c1 = vmStack().topC();
4261   if (c1->m_type == KindOfObject) {
4262     // this covers both closures and functors
4263     static StringData* invokeName = makeStaticString("__invoke");
4264     ObjectData* origObj = c1->m_data.pobj;
4265     const Class* cls = origObj->getVMClass();
4266     auto const func = cls->lookupMethod(invokeName);
4267     if (func == nullptr) {
4268       raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4271     vmStack().discard();
4272     ActRec* ar = fPushFuncImpl(func, numArgs);
4273     if (func->isStaticInPrologue()) {
      // Static __invoke: no $this; the object's reference is dropped here.
4274       ar->setClass(origObj->getVMClass());
4275       decRefObj(origObj);
4276     } else {
4277       ar->setThis(origObj);
4278       // Teleport the reference from the destroyed stack cell to the
4279       // ActRec. Don't try this at home.
4281     return;
4284   if (isArrayType(c1->m_type) || isStringType(c1->m_type)) {
4285     // support:
4286     //   array($instance, 'method')
4287     //   array('Class', 'method'),
4288     //   'func_name'
4289     //   'class::method'
4290     // which are all valid callables
4291     auto origCell = *c1;
4292     ObjectData* thiz = nullptr;
4293     HPHP::Class* cls = nullptr;
4294     StringData* invName = nullptr;
4296     auto const func = vm_decode_function(
4297       tvAsCVarRef(c1),
4298       vmfp(),
4299       /* forwarding */ false,
4300       thiz,
4301       cls,
4302       invName,
4303       DecodeFlags::NoWarn
4305     if (func == nullptr) {
4306       if (isArrayType(origCell.m_type)) {
4307         raise_error("Invalid callable (array)");
4308       } else {
4309         assert(isStringType(origCell.m_type));
4310         raise_error("Call to undefined function %s()",
4311                     origCell.m_data.pstr->data());
4315     vmStack().discard();
4316     auto const ar = fPushFuncImpl(func, numArgs);
4317     if (thiz) {
      // Bound instance method: ActRec takes its own reference on $this.
4318       thiz->incRefCount();
4319       ar->setThis(thiz);
4320     } else if (cls) {
4321       ar->setClass(cls);
4322     } else {
4323       ar->trashThis();
4326     if (UNLIKELY(invName != nullptr)) {
      // __call/__callStatic dispatch: remember the original method name.
4327       ar->setMagicDispatch(invName);
    // We saved the cell before discard; release its array/string now.
4329     if (origCell.m_type == KindOfArray) {
4330       decRefArr(origCell.m_data.parr);
4331     } else if (origCell.m_type == KindOfString) {
4332       decRefStr(origCell.m_data.pstr);
4334     return;
4337   raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
// FPushFuncD: push an ActRec for a function known at bytecode-emit time,
// identified by a NamedEntity id; fatals if the function cannot be loaded.
4340 OPTBLD_FLT_INLINE void iopFPushFuncD(intva_t numArgs, Id id) {
4341   const NamedEntityPair nep =
4342     vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
4343   Func* func = Unit::loadFunc(nep.second, nep.first);
4344   if (func == nullptr) {
4345     raise_error("Call to undefined function %s()",
4346                 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
4348   ActRec* ar = fPushFuncImpl(func, numArgs);
4349   ar->trashThis();
// FPushFuncU: push an ActRec for a function with namespace fallback — try
// the in-namespace name first, then the global name; error message uses the
// namespaced name.
4352 OPTBLD_INLINE void iopFPushFuncU(intva_t numArgs, Id nsFunc, Id globalFunc) {
4353   Unit* unit = vmfp()->m_func->unit();
4354   const NamedEntityPair nep = unit->lookupNamedEntityPairId(nsFunc);
4355   Func* func = Unit::loadFunc(nep.second, nep.first);
4356   if (func == nullptr) {
4357     const NamedEntityPair nep2 = unit->lookupNamedEntityPairId(globalFunc);
4358     func = Unit::loadFunc(nep2.second, nep2.first);
4359     if (func == nullptr) {
4360       const char *funcName = unit->lookupLitstrId(nsFunc)->data();
4361       raise_error("Call to undefined function %s()", funcName);
4364   ActRec* ar = fPushFuncImpl(func, numArgs);
4365   ar->trashThis();
// Shared helper for FPushObjMethod*: resolve `name` on obj's class and push
// an ActRec. Takes ownership of both `obj` and `name` — they are decref'd
// here on every path (including the throw path for obj).
4368 void fPushObjMethodImpl(StringData* name, ObjectData* obj, int numArgs) {
4369   const Func* f;
4370   LookupResult res;
4371   auto cls = obj->getVMClass();
4372   try {
4373     res = lookupObjMethod(
4374       f, cls, name, arGetContextClass(vmfp()), true);
4375   } catch (...) {
    // Lookup raised (e.g. visibility error); don't leak the object ref.
4376     decRefObj(obj);
4377     throw;
4379   assert(f);
4380   ActRec* ar = vmStack().allocA();
4381   ar->m_func = f;
4382   if (res == LookupResult::MethodFoundNoThis) {
    // Static-context call: drop $this, record the class instead.
4383     decRefObj(obj);
4384     ar->setClass(cls);
4385   } else {
4386     assert(res == LookupResult::MethodFoundWithThis ||
4387            res == LookupResult::MagicCallFound);
4388     /* Transfer ownership of obj to the ActRec*/
4389     ar->setThis(obj);
4391   ar->initNumArgs(numArgs);
4392   if (res == LookupResult::MagicCallFound) {
    // __call dispatch keeps `name` alive on the ActRec.
4393     ar->setMagicDispatch(name);
4394   } else {
4395     ar->trashVarEnv();
4396     decRefStr(name);
4398   setTypesFlag(vmfp(), ar);
// Push an ActRec for the 86null builtin — used when a method is invoked on
// null under ObjMethodOp::NullSafe so the call becomes a harmless no-op.
4401 void fPushNullObjMethod(int numArgs) {
4402   assert(SystemLib::s_nullFunc);
4403   ActRec* ar = vmStack().allocA();
4404   ar->m_func = SystemLib::s_nullFunc;
4405   ar->trashThis();
4406   ar->initNumArgs(numArgs);
4407   ar->trashVarEnv();
// Report a method call on a non-object: either throw a BadMethodCall
// exception (if enabled) or raise a fatal error.
// NOTE(review): when typeName is nullptr the "{}" placeholder formats a
// null const char* — presumably callers always pass a type name; verify.
4410 static void throw_call_non_object(const char* methodName,
4411                                   const char* typeName = nullptr) {
4412   std::string msg;
4413   folly::format(&msg, "Call to a member function {}() on a non-object ({})",
4414                 methodName, typeName);
4416   if (RuntimeOption::ThrowExceptionOnBadMethodCall) {
4417     SystemLib::throwBadMethodCallExceptionObject(String(msg));
4419   raise_fatal_error(msg.c_str());
// FPushObjMethod: push an ActRec for $obj->$name(...) where both the method
// name (stack top) and the object (next cell) come from the stack.
// Null receiver with NullSafe op degrades to the null-func ActRec.
4422 OPTBLD_INLINE void iopFPushObjMethod(intva_t numArgs, ObjMethodOp op) {
4423   Cell* c1 = vmStack().topC(); // Method name.
4424   if (!isStringType(c1->m_type)) {
4425     raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
4427   Cell* c2 = vmStack().indC(1); // Object.
4428   if (c2->m_type != KindOfObject) {
4429     if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(c2->m_type))) {
4430       throw_call_non_object(c1->m_data.pstr->data(),
4431                             getDataTypeString(c2->m_type).get()->data());
4433     vmStack().popC();
4434     vmStack().popC();
4435     fPushNullObjMethod(numArgs);
4436     return;
4438   ObjectData* obj = c2->m_data.pobj;
4439   StringData* name = c1->m_data.pstr;
4440   // We handle decReffing obj and name in fPushObjMethodImpl
4441   vmStack().ndiscard(2);
4442   fPushObjMethodImpl(name, obj, numArgs);
// FPushObjMethodD: like FPushObjMethod but with the method name as an
// immediate; only the object comes from the stack.
4445 OPTBLD_INLINE void
4446 iopFPushObjMethodD(intva_t numArgs, const StringData* name, ObjMethodOp op) {
4447   Cell* c1 = vmStack().topC();
4448   if (c1->m_type != KindOfObject) {
4449     if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(c1->m_type))) {
4450       throw_call_non_object(name->data(),
4451                             getDataTypeString(c1->m_type).get()->data());
4453     vmStack().popC();
4454     fPushNullObjMethod(numArgs);
4455     return;
4457   ObjectData* obj = c1->m_data.pobj;
4458   // We handle decReffing obj in fPushObjMethodImpl
4459   vmStack().discard();
4460   fPushObjMethodImpl(const_cast<StringData*>(name), obj, numArgs);
// Shared helper for FPushClsMethod{,D,F}: resolve `name` on `cls` from the
// current calling context and push an ActRec. When `forwarding` is true
// (FPushClsMethodF), propagate the caller's late-bound class instead of the
// given one. Takes ownership of `name` (decref'd unless magic dispatch).
4463 template<bool forwarding>
4464 void pushClsMethodImpl(Class* cls, StringData* name, int numArgs) {
4465   auto const ctx = liveClass();
4466   auto obj = ctx && vmfp()->hasThis() ? vmfp()->getThis() : nullptr;
4467   const Func* f;
4468   auto const res = lookupClsMethod(f, cls, name, obj, ctx, true);
4469   if (res == LookupResult::MethodFoundNoThis ||
4470       res == LookupResult::MagicCallStaticFound) {
4471     if (!f->isStaticInPrologue()) {
4472       raise_missing_this(f);
4474     obj = nullptr;
4475   } else {
4476     assert(obj);
4477     assert(res == LookupResult::MethodFoundWithThis ||
4478            res == LookupResult::MagicCallFound);
    // Instance dispatch: the ActRec takes a new reference on $this.
4479     obj->incRefCount();
4481   assertx(f);
4482   ActRec* ar = vmStack().allocA();
4483   ar->m_func = f;
4484   if (obj) {
4485     ar->setThis(obj);
4486   } else {
4487     if (forwarding && ctx) {
4488       /* Propagate the current late bound class if there is one, */
4489       /* otherwise use the class given by this instruction's input */
4490       if (vmfp()->hasThis()) {
4491         cls = vmfp()->getThis()->getVMClass();
4492       } else {
4493         cls = vmfp()->getClass();
4496     ar->setClass(cls);
4498   ar->initNumArgs(numArgs);
4499   if (res == LookupResult::MagicCallFound ||
4500       res == LookupResult::MagicCallStaticFound) {
4501     ar->setMagicDispatch(name);
4502   } else {
4503     ar->trashVarEnv();
4504     decRefStr(const_cast<StringData*>(name));
4506   setTypesFlag(vmfp(), ar);
// FPushClsMethod: push an ActRec for Cls::$name(...) where the method name
// is on the stack and the class comes from a clsref slot.
4509 OPTBLD_INLINE void iopFPushClsMethod(intva_t numArgs, clsref_slot slot) {
4510   Cell* c1 = vmStack().topC(); // Method name.
4511   if (!isStringType(c1->m_type)) {
4512     raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4514   Class* cls = slot.take();
4515   StringData* name = c1->m_data.pstr;
4516   // pushClsMethodImpl will take care of decReffing name
4517   vmStack().ndiscard(1);
4518   assert(cls && name);
4519   pushClsMethodImpl<false>(cls, name, numArgs);
// FPushClsMethodD: like FPushClsMethod but both class and method name are
// immediates; loads (and autoloads) the class, fatals if unknown.
4522 OPTBLD_INLINE
4523 void iopFPushClsMethodD(intva_t numArgs, const StringData* name, Id classId) {
4524   const NamedEntityPair &nep =
4525     vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
4526   Class* cls = Unit::loadClass(nep.second, nep.first);
4527   if (cls == nullptr) {
4528     raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
4530   pushClsMethodImpl<false>(cls, const_cast<StringData*>(name), numArgs);
4533 OPTBLD_INLINE void iopFPushClsMethodF(intva_t numArgs, clsref_slot slot) {
4534 Cell* c1 = vmStack().topC(); // Method name.
4535 if (!isStringType(c1->m_type)) {
4536 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4538 Class* cls = slot.take();
4539 StringData* name = c1->m_data.pstr;
4540 // pushClsMethodImpl will take care of decReffing name
4541 vmStack().ndiscard(1);
4542 pushClsMethodImpl<true>(cls, name, numArgs);
// FPushCtor: allocate an uninitialized instance of the class in the clsref
// slot, push it, and push an ActRec for its constructor.
4545 OPTBLD_INLINE void iopFPushCtor(intva_t numArgs, clsref_slot slot) {
4546   Class* cls = slot.take();
4547   // Lookup the ctor
4548   const Func* f;
4549   auto res UNUSED = lookupCtorMethod(f, cls, arGetContextClass(vmfp()), true);
4550   assert(res == LookupResult::MethodFoundWithThis);
4551   // Replace input with uninitialized instance.
4552   ObjectData* this_ = newInstance(cls);
4553   TRACE(2, "FPushCtor: just new'ed an instance of class %s: %p\n",
4554         cls->name()->data(), this_);
4555   vmStack().pushObject(this_);
4556   // Push new activation record.
4557   ActRec* ar = vmStack().allocA();
4558   ar->m_func = f;
4559   ar->setThis(this_);
4560   ar->initNumArgs(numArgs);
4561   ar->trashVarEnv();
4562   setTypesFlag(vmfp(), ar);
// FPushCtorD: like FPushCtor but the class is an immediate NamedEntity id;
// loads (and autoloads) the class, fatals if unknown.
4565 OPTBLD_INLINE void iopFPushCtorD(intva_t numArgs, Id id) {
4566   const NamedEntityPair &nep =
4567     vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
4568   Class* cls = Unit::loadClass(nep.second, nep.first);
4569   if (cls == nullptr) {
4570     raise_error(Strings::UNKNOWN_CLASS,
4571                 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
4573   // Lookup the ctor
4574   const Func* f;
4575   auto res UNUSED = lookupCtorMethod(f, cls, arGetContextClass(vmfp()), true);
4576   assert(res == LookupResult::MethodFoundWithThis);
4577   // Push uninitialized instance.
4578   ObjectData* this_ = newInstance(cls);
4579   TRACE(2, "FPushCtorD: new'ed an instance of class %s: %p\n",
4580         cls->name()->data(), this_);
4581   vmStack().pushObject(this_);
4582   // Push new activation record.
4583   ActRec* ar = vmStack().allocA();
4584   ar->m_func = f;
4585   ar->setThis(this_);
4586   ar->initNumArgs(numArgs);
4587   ar->trashVarEnv();
4588   setTypesFlag(vmfp(), ar);
// FPushCtorI: like FPushCtorD but the class is identified by a unit-local
// PreClass index and defined on the spot (failIsFatal = true).
4591 OPTBLD_INLINE void iopFPushCtorI(intva_t numArgs, intva_t clsIx) {
4592   auto const func = vmfp()->m_func;
4593   auto const preCls = func->unit()->lookupPreClassId(clsIx);
4594   auto const cls = Unit::defClass(preCls, true);
4596   // Lookup the ctor
4597   const Func* f;
4598   auto res UNUSED = lookupCtorMethod(f, cls, arGetContextClass(vmfp()), true);
4599   assert(res == LookupResult::MethodFoundWithThis);
4600   // Push uninitialized instance.
4601   ObjectData* this_ = newInstance(cls);
4602   TRACE(2, "FPushCtorI: new'ed an instance of class %s: %p\n",
4603         cls->name()->data(), this_);
4604   vmStack().pushObject(this_);
4605   // Push new activation record.
4606   ActRec* ar = vmStack().allocA();
4607   ar->m_func = f;
4608   ar->setThis(this_);
4609   ar->initNumArgs(numArgs);
4610   ar->trashVarEnv();
4611   setTypesFlag(vmfp(), ar);
// DecodeCufIter: decode the callable on top of the stack into the CufIter
// `it`. On failure, branch to `takenpc`; on success, record func/ctx/name
// in the iterator. Always pops the callable.
4614 OPTBLD_INLINE
4615 void iopDecodeCufIter(PC& pc, Iter* it, PC takenpc) {
4616   CufIter &cit = it->cuf();
4618   ObjectData* obj = nullptr;
4619   HPHP::Class* cls = nullptr;
4620   StringData* invName = nullptr;
4621   TypedValue *func = vmStack().topTV();
  // Decode relative to the calling PHP frame, not a builtin's frame.
4623   ActRec* ar = vmfp();
4624   if (vmfp()->m_func->isBuiltin()) {
4625     ar = g_context->getOuterVMFrame(ar);
4627   const Func* f = vm_decode_function(tvAsVariant(func),
4628                                      ar, false,
4629                                      obj, cls, invName,
4630                                      DecodeFlags::NoWarn);
4632   if (f == nullptr) {
4633     pc = takenpc;
4634   } else {
4635     cit.setFunc(f);
4636     if (obj) {
4637       cit.setCtx(obj);
4638       obj->incRefCount();
4639     } else {
4640       cit.setCtx(cls);
4642     cit.setName(invName);
4644   vmStack().popC();
// FPushCufIter: push an ActRec from a previously decoded CufIter (see
// DecodeCufIter), re-incref'ing any $this / magic-dispatch name it holds.
4647 OPTBLD_INLINE void iopFPushCufIter(intva_t numArgs, Iter* it) {
4648   auto f = it->cuf().func();
4649   auto o = it->cuf().ctx();
4650   auto n = it->cuf().name();
4652   ActRec* ar = vmStack().allocA();
4653   ar->m_func = f;
  // ctx is either a this pointer or a class; method funcs must have one.
4654   assertx((f->implCls() != nullptr) == (o != nullptr));
4655   if (o) {
4656     ar->setThisOrClass(o);
4657     if (ActRec::checkThis(o)) ar->getThis()->incRefCount();
4658   } else {
4659     ar->trashThis();
4661   ar->initNumArgs(numArgs);
4662   if (n) {
4663     ar->setMagicDispatch(n);
4664     n->incRefCount();
4665   } else {
4666     ar->trashVarEnv();
4668   setTypesFlag(vmfp(), ar);
// Shared helper for FPushCuf/FPushCufF/FPushCufSafe: decode the callable on
// the stack and push an ActRec for it. `forward` propagates the late-bound
// class; `safe` keeps an extra stack slot and pushes a success boolean,
// falling back to the null builtin on decode failure instead of warning.
4671 OPTBLD_INLINE void doFPushCuf(int32_t numArgs, bool forward, bool safe) {
  // In safe mode the callable sits one slot below the default-value cell.
4672   TypedValue func = vmStack().topTV()[safe];
4674   ObjectData* obj = nullptr;
4675   HPHP::Class* cls = nullptr;
4676   StringData* invName = nullptr;
4678   const Func* f = vm_decode_function(
4679     tvAsVariant(&func), vmfp(), forward, obj, cls, invName,
4680     safe ? DecodeFlags::NoWarn : DecodeFlags::Warn);
4682   if (safe) vmStack().topTV()[1] = vmStack().topTV()[0];
4683   vmStack().ndiscard(1);
4684   if (f == nullptr) {
4685     f = SystemLib::s_nullFunc;
4686     obj = nullptr;
4687     cls = nullptr;
4688     if (safe) {
4689       vmStack().pushBool(false);
4691   } else if (safe) {
4692     vmStack().pushBool(true);
4695   ActRec* ar = vmStack().allocA();
4696   ar->m_func = f;
4697   if (obj) {
4698     ar->setThis(obj);
4699     obj->incRefCount();
4700   } else if (cls) {
4701     ar->setClass(cls);
4702   } else {
4703     ar->trashThis();
4705   ar->initNumArgs(numArgs);
4706   if (invName) {
4707     ar->setMagicDispatch(invName);
4708   } else {
4709     ar->trashVarEnv();
4711   setTypesFlag(vmfp(), ar);
  // Release the copy of the callable cell taken at entry.
4712   tvRefcountedDecRef(&func);
4715 OPTBLD_INLINE void iopFPushCuf(intva_t numArgs) {
4716 doFPushCuf(numArgs, false, false);
4719 OPTBLD_INLINE void iopFPushCufF(intva_t numArgs) {
4720 doFPushCuf(numArgs, true, false);
4723 OPTBLD_INLINE void iopFPushCufSafe(intva_t numArgs) {
4724 doFPushCuf(numArgs, false, true);
// FPassC: pass a cell by value — runtime no-op beyond the sanity check.
4727 OPTBLD_INLINE void iopFPassC(intva_t paramId) {
4728   assert(paramId < arFromSp(paramId + 1)->numArgs());
// FPassCW: pass a cell where the callee may want a ref — raise a strict
// warning if the parameter is declared by-ref, but pass by value anyway.
4731 OPTBLD_INLINE void iopFPassCW(intva_t paramId) {
4732   auto ar = arFromSp(paramId + 1);
4733   assert(paramId < ar->numArgs());
4734   auto const func = ar->m_func;
4735   if (func->mustBeRef(paramId)) {
4736     raise_strict_warning("Only variables should be passed by reference");
// FPassCE: like FPassCW but a by-ref parameter is a fatal error.
4740 OPTBLD_INLINE void iopFPassCE(intva_t paramId) {
4741   auto ar = arFromSp(paramId + 1);
4742   assert(paramId < ar->numArgs());
4743   auto const func = ar->m_func;
4744   if (func->mustBeRef(paramId)) {
4745     raise_error("Cannot pass parameter %d by reference", paramId+1);
// FPassV: pass a ref — unbox it if the callee takes this param by value.
4749 OPTBLD_INLINE void iopFPassV(intva_t paramId) {
4750   auto ar = arFromSp(paramId + 1);
4751   assert(paramId < ar->numArgs());
4752   const Func* func = ar->m_func;
4753   if (!func->byRef(paramId)) {
4754     vmStack().unbox();
// FPassVNop: statically-known by-ref pass — asserts only, no runtime work.
4758 OPTBLD_INLINE void iopFPassVNop(intva_t paramId) {
4759   DEBUG_ONLY auto ar = arFromSp(paramId + 1);
4760   assert(paramId < ar->numArgs());
4761   assert(ar->m_func->byRef(paramId));
// FPassR: pass a return value of unknown ref-ness — box or unbox the top
// of the stack to match the callee's declared parameter mode.
4764 OPTBLD_INLINE void iopFPassR(intva_t paramId) {
4765   auto ar = arFromSp(paramId + 1);
4766   assert(paramId < ar->numArgs());
4767   const Func* func = ar->m_func;
4768   if (func->byRef(paramId)) {
4769     TypedValue* tv = vmStack().topTV();
4770     if (tv->m_type != KindOfRef) {
4771       tvBox(tv);
4773   } else {
4774     if (vmStack().topTV()->m_type == KindOfRef) {
4775       vmStack().unbox();
// FPassL: pass a local — push either its value (CGetL semantics) or a ref
// to it (VGetL semantics) depending on the callee's parameter mode.
// Note: no stack input, so the ActRec is at arFromSp(paramId) (not +1).
4780 OPTBLD_INLINE void iopFPassL(intva_t paramId, local_var loc) {
4781   auto ar = arFromSp(paramId);
4782   assert(paramId < ar->numArgs());
4783   TypedValue* fr = loc.ptr;
4784   TypedValue* to = vmStack().allocTV();
4785   if (!ar->m_func->byRef(paramId)) {
4786     cgetl_body(vmfp(), fr, to, loc.index, true);
4787   } else {
4788     vgetl_body(fr, to);
// FPassN: pass a named variable — dispatch to CGetN or VGetN by param mode.
4792 OPTBLD_INLINE void iopFPassN(intva_t paramId) {
4793   auto ar = arFromSp(paramId + 1);
4794   assert(paramId < ar->numArgs());
4795   if (!ar->m_func->byRef(paramId)) {
4796     iopCGetN();
4797   } else {
4798     iopVGetN();
// FPassG: pass a global — dispatch to CGetG or VGetG by param mode.
4802 OPTBLD_INLINE void iopFPassG(intva_t paramId) {
4803   auto ar = arFromSp(paramId + 1);
4804   assert(paramId < ar->numArgs());
4805   if (!ar->m_func->byRef(paramId)) {
4806     iopCGetG();
4807   } else {
4808     iopVGetG();
// FPassS: pass a static property — dispatch to CGetS or VGetS by param mode.
4812 OPTBLD_INLINE void iopFPassS(intva_t paramId, clsref_slot slot) {
4813   auto ar = arFromSp(paramId + 1);
4814   assert(paramId < ar->numArgs());
4815   if (!ar->m_func->byRef(paramId)) {
4816     iopCGetS(slot);
4817   } else {
4818     iopVGetS(slot);
// Enter the callee whose ActRec is `ar`: set up the frame and run the
// function-call event hook. Returns true if execution should proceed into
// the callee; false if the hook intercepted the call (pc already updated).
4822 bool doFCall(ActRec* ar, PC& pc) {
4823   TRACE(3, "FCall: pc %p func %p base %d\n", vmpc(),
4824         vmfp()->m_func->unit()->entry(),
4825         int(vmfp()->m_func->base()));
4826   prepareFuncEntry(ar, pc, StackArgsState::Untrimmed);
4827   vmpc() = pc;
4828   if (EventHook::FunctionCall(ar, EventHook::NormalFunc)) return true;
4829   pc = vmpc();
4830   return false;
// FCall: invoke the function whose ActRec was pushed by a prior FPush*.
4833 OPTBLD_INLINE void iopFCall(PC& pc, intva_t numArgs) {
4834   auto ar = arFromSp(numArgs);
4835   assert(numArgs == ar->numArgs());
4836   checkStack(vmStack(), ar->m_func, 0);
4837   ar->setReturn(vmfp(), pc, jit::tc::ustubs().retHelper);
4838   doFCall(ar, pc);
// FCallD: like FCall but the callee's class/function names are immediates;
// verifies the name matches the ActRec unless renaming/interception applies.
4841 OPTBLD_FLT_INLINE
4842 void iopFCallD(PC& pc, intva_t numArgs, const StringData* clsName,
4843                const StringData* funcName) {
4844   auto ar = arFromSp(numArgs);
4845   if (!RuntimeOption::EvalJitEnableRenameFunction &&
4846       !(ar->m_func->attrs() & AttrInterceptable)) {
4847     assert(ar->m_func->name()->isame(funcName));
4849   assert(numArgs == ar->numArgs());
4850   checkStack(vmStack(), ar->m_func, 0);
4851   ar->setReturn(vmfp(), pc, jit::tc::ustubs().retHelper);
4852   doFCall(ar, pc);
// FCallAwait: like FCallD, but marks the ActRec so the caller awaits the
// callee's result (async function call fused with an await).
4855 OPTBLD_INLINE
4856 void iopFCallAwait(PC& pc, intva_t numArgs,
4857                    const StringData* clsName, const StringData* funcName) {
4858   auto ar = arFromSp(numArgs);
4859   if (!RuntimeOption::EvalJitEnableRenameFunction &&
4860       !(ar->m_func->attrs() & AttrInterceptable)) {
4861     assert(ar->m_func->name()->isame(funcName));
4863   assert(numArgs == ar->numArgs());
4864   checkStack(vmStack(), ar->m_func, 0);
4865   ar->setReturn(vmfp(), pc, jit::tc::ustubs().retHelper);
4866   ar->setFCallAwait();
4867   doFCall(ar, pc);
// FCallBuiltin: call a native builtin directly, without an ActRec. Arguments
// are on the stack; they are coerced to the builtin's signature, the native
// is invoked, args are freed, and the return value replaces them.
4870 OPTBLD_FLT_INLINE
4871 void iopFCallBuiltin(intva_t numArgs, intva_t numNonDefault, Id id) {
4872   const NamedEntity* ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
4873   auto unit = vmfp()->func()->unit();
4874   auto strict = builtinCallUsesStrictTypes(unit);
4875   Func* func = Unit::lookupFunc(ne);
4876   if (func == nullptr) {
4877     raise_error("Call to undefined function %s()",
4878                 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
  // args points at the first (deepest) argument; stack grows downward.
4881   TypedValue* args = vmStack().indTV(numArgs-1);
4882   TypedValue ret;
4883   if (Native::coerceFCallArgs(args, numArgs, numNonDefault, func, strict)) {
4884     if (func->hasVariadicCaptureParam()) {
4885       assertx(numArgs > 0);
4886       assertx(isArrayType(args[1-numArgs].m_type));
4888     Native::callFunc<true>(func, nullptr, args, numNonDefault, ret);
4889   } else {
    // Coercion failed: return null or false per the builtin's coerce mode.
4890     if (func->attrs() & AttrParamCoerceModeNull) {
4891       ret.m_type = KindOfNull;
4892     } else {
4893       assert(func->attrs() & AttrParamCoerceModeFalse);
4894       ret.m_type = KindOfBoolean;
4895       ret.m_data.num = 0;
4899   frame_free_args(args, numNonDefault);
4900   vmStack().ndiscard(numArgs);
4901   tvCopy(ret, *vmStack().allocTV());
// Policy for doFCallArray when the argument list is not a container.
4904 enum class CallArrOnInvalidContainer {
4905   // task #1756122: warning and returning null is what we /should/ always
4906   // do in call_user_func_array, but some code depends on the broken
4907   // behavior of casting the list of args to FCallArray to an array.
4908   CastToArray,
4909   WarnAndReturnNull,
4910   WarnAndContinue
// Shared implementation of FCallArray/FCallUnpack: the top of the stack is
// the argument container; expand it into stack arguments and enter the
// callee. Returns true if execution should proceed into the callee, false
// if the call was aborted (invalid args, event-hook intercept) — in which
// case a return value has already been pushed. `ret` optionally supplies a
// JIT return address for the ActRec.
4913 static bool doFCallArray(PC& pc, int numStackValues,
4914                          CallArrOnInvalidContainer onInvalid,
4915                          void* ret = nullptr) {
4916   assert(numStackValues >= 1);
4917   ActRec* ar = (ActRec*)(vmStack().top() + numStackValues);
4918   assert(ar->numArgs() == numStackValues);
4920   Cell* c1 = vmStack().topC();
4921   if (UNLIKELY(!isContainer(*c1))) {
4922     switch (onInvalid) {
4923       case CallArrOnInvalidContainer::CastToArray:
4924         tvCastToArrayInPlace(c1);
4925         break;
4926       case CallArrOnInvalidContainer::WarnAndReturnNull:
4927         vmStack().pushNull();
4928         cleanupParamsAndActRec(vmStack(), ar, nullptr, nullptr);
4929         raise_warning("call_user_func_array() expects parameter 2 to be array");
4930         return false;
4931       case CallArrOnInvalidContainer::WarnAndContinue: {
4932         Cell tmp = *c1;
4933         // argument_unpacking RFC dictates "containers and Traversables"
4934         raise_warning_unsampled("Only containers may be unpacked");
        // Replace the invalid value with an empty array and continue.
4935         c1->m_type = KindOfPersistentArray;
4936         c1->m_data.parr = staticEmptyArray();
4937         tvRefcountedDecRef(&tmp);
4938         break;
4943   const Func* func = ar->m_func;
4945   Cell args = *c1;
4946   vmStack().discard(); // prepareArrayArgs will push arguments onto the stack
4947   numStackValues--;
4948   SCOPE_EXIT { tvRefcountedDecRef(&args); };
4949   checkStack(vmStack(), func, 0);
4951   assert(!ar->resumed());
4952   TRACE(3, "FCallArray: pc %p func %p base %d\n", vmpc(),
4953         vmfp()->unit()->entry(),
4954         int(vmfp()->m_func->base()));
4955   ar->setReturn(vmfp(), pc, jit::tc::ustubs().retHelper);
4957   // When called from the jit, populate the correct return address
4958   if (ret) {
4959     ar->setJitReturn(ret);
4962   auto prepResult = prepareArrayArgs(ar, args, vmStack(), numStackValues,
4963                                      /* ref param checks */ true, nullptr);
4964   if (UNLIKELY(!prepResult)) {
4965     vmStack().pushNull(); // return value is null if args are invalid
4966     return false;
4970   prepareFuncEntry(ar, pc, StackArgsState::Trimmed);
4971   vmpc() = pc;
4972   if (UNLIKELY(!EventHook::FunctionCall(ar, EventHook::NormalFunc))) {
4973     pc = vmpc();
4974     return false;
4976   return true;
// Entry point for FCallArray-style calls coming from the JIT'd code (TC).
// Synchronizes VM register state around doFCallArray. numArgs == 0 encodes
// the legacy FCallArray form (single arg, cast-to-array semantics).
4979 bool doFCallArrayTC(PC pc, int32_t numArgs, void* retAddr) {
4980   assert_native_stack_aligned();
4981   assert(tl_regState == VMRegState::DIRTY);
4982   tl_regState = VMRegState::CLEAN;
4983   auto onInvalid = CallArrOnInvalidContainer::WarnAndContinue;
4984   if (!numArgs) {
4985     numArgs = 1;
4986     onInvalid = CallArrOnInvalidContainer::CastToArray;
4988   auto const ret = doFCallArray(pc, numArgs, onInvalid, retAddr);
4989   tl_regState = VMRegState::DIRTY;
4990   return ret;
// FCallArray: call with a single array of args (legacy cast-to-array mode).
4993 OPTBLD_INLINE void iopFCallArray(PC& pc) {
4994   doFCallArray(pc, 1, CallArrOnInvalidContainer::CastToArray);
// FCallUnpack: call with positional args plus a trailing unpacked container.
4997 OPTBLD_INLINE void iopFCallUnpack(PC& pc, intva_t numArgs) {
4998   auto ar = arFromSp(numArgs);
4999   assert(numArgs == ar->numArgs());
5000   checkStack(vmStack(), ar->m_func, 0);
5001   doFCallArray(pc, numArgs, CallArrOnInvalidContainer::WarnAndContinue);
// CufSafeArray: collapse the [ok-flag, result] pair left by an fb_call_
// user_func_safe-style call into a two-element array on the stack.
5004 OPTBLD_INLINE void iopCufSafeArray() {
5005   Array ret;
5006   ret.append(tvAsVariant(vmStack().top() + 1));
5007   ret.appendWithRef(tvAsVariant(vmStack().top() + 0));
5008   vmStack().popTV();
5009   vmStack().popTV();
5010   tvAsVariant(vmStack().top()) = ret;
// CufSafeReturn: select between the call result and the saved default value
// based on the ok-flag, then shrink the stack by the two consumed cells.
5013 OPTBLD_INLINE void iopCufSafeReturn() {
5014   bool ok = cellToBool(*tvToCell(vmStack().top() + 1));
5015   tvRefcountedDecRef(vmStack().top() + 1);
  // Free whichever of {default, result} is being discarded.
5016   tvRefcountedDecRef(vmStack().top() + (ok ? 2 : 0));
5017   if (ok) vmStack().top()[2] = vmStack().top()[0];
5018   vmStack().ndiscard(2);
// Initialize a forward iterator from the cell `c1`; on empty input, branch
// to targetpc. Pops the iterated value in both cases.
5021 inline bool initIterator(PC& pc, PC targetpc, Iter* it, Cell* c1) {
5022   bool hasElems = it->init(c1);
5023   if (!hasElems) pc = targetpc;
5024   vmStack().popC();
5025   return hasElems;
// IterInit: start iteration; store the first value into local `val`.
5028 OPTBLD_INLINE void iopIterInit(PC& pc, Iter* it, PC targetpc, local_var val) {
5029   Cell* c1 = vmStack().topC();
5030   if (initIterator(pc, targetpc, it, c1)) {
5031     tvAsVariant(val.ptr) = it->arr().second();
// IterInitK: like IterInit, also storing the first key into local `key`.
5035 OPTBLD_INLINE
5036 void iopIterInitK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5037   Cell* c1 = vmStack().topC();
5038   if (initIterator(pc, targetpc, it, c1)) {
5039     tvAsVariant(val.ptr) = it->arr().second();
5040     tvAsVariant(key.ptr) = it->arr().first();
// WIterInit: ref-preserving variant — assigns the value withRef semantics.
5044 OPTBLD_INLINE void iopWIterInit(PC& pc, Iter* it, PC targetpc, local_var val) {
5045   Cell* c1 = vmStack().topC();
5046   if (initIterator(pc, targetpc, it, c1)) {
5047     tvAsVariant(val.ptr).setWithRef(it->arr().secondRefPlus());
// WIterInitK: like WIterInit, also storing the first key into `key`.
5051 OPTBLD_INLINE void
5052 iopWIterInitK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5053   Cell* c1 = vmStack().topC();
5054   if (initIterator(pc, targetpc, it, c1)) {
5055     tvAsVariant(val.ptr).setWithRef(it->arr().secondRefPlus());
5056     tvAsVariant(key.ptr) = it->arr().first();
// Initialize a mutable (by-ref) iterator over the ref r1. Dispatches on the
// inner value's type: array-like, object, or other.
5060 inline bool initIteratorM(Iter* it, Ref* r1, TypedValue *val, TypedValue *key) {
5061   TypedValue* rtv = r1->m_data.pref->tv();
5062   if (isArrayLikeType(rtv->m_type)) {
5063     return new_miter_array_key(it, r1->m_data.pref, val, key);
5065   if (rtv->m_type == KindOfObject) {
5066     Class* ctx = arGetContextClass(vmfp());
5067     return new_miter_object(it, r1->m_data.pref, ctx, val, key);
5069   return new_miter_other(it, r1->m_data.pref);
// MIterInit: start by-ref iteration over the ref on top of the stack.
5072 OPTBLD_INLINE void iopMIterInit(PC& pc, Iter* it, PC targetpc, local_var val) {
5073   Ref* r1 = vmStack().topV();
5074   assert(r1->m_type == KindOfRef);
5075   if (!initIteratorM(it, r1, val.ptr, nullptr)) {
5076     pc = targetpc; // nothing to iterate; exit foreach loop.
5078   vmStack().popV();
// MIterInitK: like MIterInit, also binding the key into local `key`.
5081 OPTBLD_INLINE void
5082 iopMIterInitK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5083   Ref* r1 = vmStack().topV();
5084   assert(r1->m_type == KindOfRef);
5085   if (!initIteratorM(it, r1, val.ptr, key.ptr)) {
5086     pc = targetpc; // nothing to iterate; exit foreach loop.
5088   vmStack().popV();
// IterNext: advance; on more elements, loop back to targetpc with the new
// value in `val`. The surprise check covers the backward jump.
5091 OPTBLD_INLINE void iopIterNext(PC& pc, Iter* it, PC targetpc, local_var val) {
5092   jmpSurpriseCheck(targetpc - pc);
5093   if (it->next()) {
5094     pc = targetpc;
5095     tvAsVariant(val.ptr) = it->arr().second();
// IterNextK: like IterNext, also refreshing the key local.
5099 OPTBLD_INLINE
5100 void iopIterNextK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5101   jmpSurpriseCheck(targetpc - pc);
5102   if (it->next()) {
5103     pc = targetpc;
5104     tvAsVariant(val.ptr) = it->arr().second();
5105     tvAsVariant(key.ptr) = it->arr().first();
// WIterNext: ref-preserving advance.
5109 OPTBLD_INLINE void iopWIterNext(PC& pc, Iter* it, PC targetpc, local_var val) {
5110   jmpSurpriseCheck(targetpc - pc);
5111   if (it->next()) {
5112     pc = targetpc;
5113     tvAsVariant(val.ptr).setWithRef(it->arr().secondRefPlus());
// WIterNextK: ref-preserving advance, also refreshing the key local.
5117 OPTBLD_INLINE void
5118 iopWIterNextK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5119   jmpSurpriseCheck(targetpc - pc);
5120   if (it->next()) {
5121     pc = targetpc;
5122     tvAsVariant(val.ptr).setWithRef(it->arr().secondRefPlus());
5123     tvAsVariant(key.ptr) = it->arr().first();
// MIterNext: advance a by-ref iterator; miter_next_key rebinds the locals.
5127 OPTBLD_INLINE void iopMIterNext(PC& pc, Iter* it, PC targetpc, local_var val) {
5128   jmpSurpriseCheck(targetpc - pc);
5129   if (miter_next_key(it, val.ptr, nullptr)) {
5130     pc = targetpc;
// MIterNextK: like MIterNext, also rebinding the key local.
5134 OPTBLD_INLINE void
5135 iopMIterNextK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5136   jmpSurpriseCheck(targetpc - pc);
5137   if (miter_next_key(it, val.ptr, key.ptr)) {
5138     pc = targetpc;
// IterFree: release a forward iterator's resources.
5142 OPTBLD_INLINE void iopIterFree(Iter* it) {
5143   it->free();
// MIterFree: release a by-ref (mutable) iterator's resources.
5146 OPTBLD_INLINE void iopMIterFree(Iter* it) {
5147   it->mfree();
// CIterFree: release a CufIter's resources.
5150 OPTBLD_INLINE void iopCIterFree(Iter* it) {
5151   it->cfree();
// Shared implementation of the include/require family: resolve the path on
// top of the stack (relative, docroot-relative, or via include path), look
// up the unit, and evaluate its pseudomain unless Once semantics suppress a
// re-evaluation. Pushes false on lookup failure (or fatals, per flags).
5154 OPTBLD_INLINE void inclOp(PC& pc, InclOpFlags flags, const char* opName) {
5155   Cell* c1 = vmStack().topC();
5156   auto path = String::attach(prepareKey(*c1));
5157   bool initial;
5158   TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
5159         flags & InclOpFlags::Once ? "Once" : "",
5160         flags & InclOpFlags::DocRoot ? "DocRoot" : "",
5161         flags & InclOpFlags::Relative ? "Relative" : "",
5162         flags & InclOpFlags::Fatal ? "Fatal" : "",
5163         path.data());
  // Directory of the currently executing unit, for relative resolution.
5165   auto curUnitFilePath = [&] {
5166     namespace fs = boost::filesystem;
5167     fs::path currentUnit(vmfp()->m_func->unit()->filepath()->data());
5168     fs::path currentDir(currentUnit.branch_path());
5169     return currentDir.string();
5172   auto const unit = [&] {
5173     if (flags & InclOpFlags::Relative) {
5174       String absPath = curUnitFilePath() + '/';
5175       absPath += path;
5176       return lookupUnit(absPath.get(), "", &initial);
5178     if (flags & InclOpFlags::DocRoot) {
5179       return lookupUnit(
5180         SourceRootInfo::RelativeToPhpRoot(path).get(), "", &initial);
5182     return lookupUnit(path.get(), curUnitFilePath().c_str(), &initial);
5183   }();
5185   vmStack().popC();
5186   if (unit == nullptr) {
5187     if (flags & InclOpFlags::Fatal) {
5188       raise_error("%s(%s): File not found", opName, path.data());
5189     } else {
5190       raise_warning("%s(%s): File not found", opName, path.data());
5192     vmStack().pushBool(false);
5193     return;
  // `initial` is false when the unit was already included; Once skips it.
5196   if (!(flags & InclOpFlags::Once) || initial) {
5197     g_context->evalUnit(unit, pc, EventHook::PseudoMain);
5198   } else {
5199     Stats::inc(Stats::PseudoMain_Guarded);
5200     vmStack().pushBool(true);
5204 OPTBLD_INLINE void iopIncl(PC& pc) {
5205 inclOp(pc, InclOpFlags::Default, "include");
5208 OPTBLD_INLINE void iopInclOnce(PC& pc) {
5209 inclOp(pc, InclOpFlags::Once, "include_once");
5212 OPTBLD_INLINE void iopReq(PC& pc) {
5213 inclOp(pc, InclOpFlags::Fatal, "require");
5216 OPTBLD_INLINE void iopReqOnce(PC& pc) {
5217 inclOp(pc, InclOpFlags::Fatal | InclOpFlags::Once, "require_once");
5220 OPTBLD_INLINE void iopReqDoc(PC& pc) {
5221 inclOp(
5223 InclOpFlags::Fatal | InclOpFlags::Once | InclOpFlags::DocRoot,
5224 "require_once"
// Eval: compile and run the PHP source string on top of the stack. Disabled
// in RepoAuthoritative mode. On a parse fatal, logs a warning-style message
// (using the synthesized eval filename/line) and pushes false.
5228 OPTBLD_INLINE void iopEval(PC& pc) {
5229   Cell* c1 = vmStack().topC();
5231   if (UNLIKELY(RuntimeOption::EvalAuthoritativeMode)) {
5232     // Ahead of time whole program optimizations need to assume it can
5233     // see all the code, or it really can't do much.
5234     raise_error("You can't use eval in RepoAuthoritative mode");
5237   auto code = String::attach(prepareKey(*c1));
5238   String prefixedCode = concat("<?php ", code);
  // Synthesize a "<file>(<line>)(<md5>...)" name so errors are traceable.
5240   auto evalFilename = std::string();
5241   auto vm = &*g_context;
5242   string_printf(
5243     evalFilename,
5244     "%s(%d)(%s" EVAL_FILENAME_SUFFIX,
5245     vm->getContainingFileName()->data(),
5246     vm->getLine(),
5247     string_md5(code.slice()).c_str()
5249   Unit* unit = vm->compileEvalString(prefixedCode.get(), evalFilename.c_str());
5250   if (!RuntimeOption::EvalJitEvaledCode) {
5251     unit->setInterpretOnly();
5253   const StringData* msg;
5254   int line = 0;
5256   vmStack().popC();
5257   if (unit->parseFatal(msg, line)) {
5258     auto const errnum = static_cast<int>(ErrorMode::WARNING);
5259     if (vm->errorNeedsLogging(errnum)) {
5260       // manual call to Logger instead of logError as we need to use
5261       // evalFileName and line as the exception doesn't track the eval()
5262       Logger::Error(
5263         "\nFatal error: %s in %s on line %d",
5264         msg->data(),
5265         evalFilename.c_str(),
5266         line
5270     vmStack().pushBool(false);
5271     return;
5273   vm->evalUnit(unit, pc, EventHook::Eval);
5276 OPTBLD_INLINE void iopDefFunc(intva_t fid) {
5277 Func* f = vmfp()->m_func->unit()->lookupFuncId(fid);
5278 setCachedFunc(f, isDebuggerAttached());
5281 OPTBLD_INLINE void iopDefCls(intva_t cid) {
5282 PreClass* c = vmfp()->m_func->unit()->lookupPreClassId(cid);
5283 Unit::defClass(c);
// AliasCls: implement class_alias() — alias `original` to `alias`. The top
// of the stack is the autoload flag (coerced to bool); pushes whether the
// alias was successfully created.
5286 OPTBLD_INLINE void iopAliasCls(const StringData* original,
5287                                const StringData* alias) {
5288   TypedValue* aloadTV = vmStack().topTV();
5289   tvCastToBooleanInPlace(aloadTV);
5290   assert(aloadTV->m_type == KindOfBoolean);
5291   bool autoload = aloadTV->m_data.num;
  // popX: the cell is known non-refcounted (a bool) after the cast.
5292   vmStack().popX();
5294   vmStack().pushBool(Unit::aliasClass(original, alias, autoload));
// DefClsNop: placeholder for a class definition hoisted elsewhere — no-op.
5297 OPTBLD_INLINE void iopDefClsNop(intva_t cid) {
// DefTypeAlias: define the type alias with unit-local id `tid`.
5300 OPTBLD_INLINE void iopDefTypeAlias(intva_t tid) {
5301   vmfp()->func()->unit()->defTypeAlias(tid);
5304 static inline void checkThis(ActRec* fp) {
5305 if (!fp->func()->cls() || !fp->hasThis()) {
5306 raise_error(Strings::FATAL_NULL_THIS);
5310 OPTBLD_INLINE void iopThis() {
5311 checkThis(vmfp());
5312 ObjectData* this_ = vmfp()->getThis();
5313 vmStack().pushObject(this_);
// BareThis: push $this if present, else null — with the notice/assert
// behavior selected by the BareThisOp immediate.
5316 OPTBLD_INLINE void iopBareThis(BareThisOp bto) {
5317   if (vmfp()->func()->cls() && vmfp()->hasThis()) {
5318     ObjectData* this_ = vmfp()->getThis();
5319     vmStack().pushObject(this_);
5320   } else {
5321     vmStack().pushNull();
5322     switch (bto) {
5323     case BareThisOp::Notice:   raise_notice(Strings::WARN_NULL_THIS);   break;
5324     case BareThisOp::NoNotice: break;
5325     case BareThisOp::NeverNull:
5326       assert(!"$this cannot be null in BareThis with NeverNull option");
5327       break;
// CheckThis: fatal if the current frame has no $this; pushes nothing.
5332 OPTBLD_INLINE void iopCheckThis() {
5333   checkThis(vmfp());
// InitThisLoc: copy $this (if any) into the given local, incref'ing it;
// otherwise leave the local Uninit.
5336 OPTBLD_INLINE void iopInitThisLoc(local_var thisLoc) {
5337   tvRefcountedDecRef(thisLoc.ptr);
5338   if (vmfp()->func()->cls() && vmfp()->hasThis()) {
5339     thisLoc->m_data.pobj = vmfp()->getThis();
5340     thisLoc->m_type = KindOfObject;
5341     tvIncRef(thisLoc.ptr);
5342   } else {
5343     tvWriteUninit(thisLoc.ptr);
// Resolve the RefData backing a function-static variable `name` for frame
// `fp`. Sets `inited` to whether the static has been initialized before.
// Closures store their statics on the closure object, not in RDS.
5347 static inline RefData* lookupStatic(const StringData* name,
5348                                     const ActRec* fp,
5349                                     bool& inited) {
5350   auto const func = fp->m_func;
5352   if (UNLIKELY(func->isClosureBody())) {
5353     assert(!func->hasVariadicCaptureParam());
    // The closure object lives in the local slot right after the params.
5354     return lookupStaticFromClosure(
5355       frame_local(fp, func->numParams())->m_data.pobj, name, inited);
5358   auto const refData = rds::bindStaticLocal(func, name);
5359   inited = refData.isInit();
5360   if (!inited) {
5361     refData->initInRDS();
5362     refData.markInit();
5364   return refData.get();
// StaticLoc: bind local `loc` to the static variable `var` (null-init on
// first use) and push whether it was already initialized.
5367 OPTBLD_INLINE void iopStaticLoc(local_var loc, const StringData* var) {
5368   bool inited;
5369   auto const refData = lookupStatic(var, vmfp(), inited);
5370   if (!inited) {
5371     refData->tv()->m_type = KindOfNull;
5373   auto const tmpTV = make_tv<KindOfRef>(refData);
5374   tvBind(&tmpTV, loc.ptr);
5375   vmStack().pushBool(inited);
// StaticLocInit: like StaticLoc, but on first use the static is initialized
// from the cell on top of the stack. The initializer is consumed (discarded)
// either way; nothing is pushed.
OPTBLD_INLINE void iopStaticLocInit(local_var loc, const StringData* var) {
  bool inited;
  auto const refData = lookupStatic(var, vmfp(), inited);

  if (!inited) {
    auto const initVal = vmStack().topC();
    cellDup(*initVal, *refData->tv());
  }

  auto const tmpTV = make_tv<KindOfRef>(refData);
  tvBind(&tmpTV, loc.ptr);
  vmStack().discard();
}
// Catch: pop the current fault from the fault stack and push its user
// exception object. The fault's reference is transferred to the VM stack
// (pushObjectNoRc), so no incref/decref pair is needed.
OPTBLD_INLINE void iopCatch() {
  auto vm = &*g_context;
  assert(vm->m_faults.size() > 0);
  Fault fault = vm->m_faults.back();
  vm->m_faults.pop_back();
  assert(fault.m_raiseFrame == vmfp());
  assert(fault.m_userException);
  vmStack().pushObjectNoRc(fault.m_userException);
}
// LateBoundCls: write the late-bound class (static::) of the current frame
// into the class-ref slot; fatals when there is no class context.
OPTBLD_INLINE void iopLateBoundCls(clsref_slot slot) {
  Class* cls = frameStaticClass(vmfp());
  if (!cls) {
    raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
  }
  slot.put(cls);
}
// VerifyParamType: enforce the declared type constraint of parameter
// `param`. Type-variable and type-constant constraints are not checked here.
OPTBLD_INLINE void iopVerifyParamType(local_var param) {
  const Func *func = vmfp()->m_func;
  assert(param.index < func->numParams());
  assert(func->numParams() == int(func->params().size()));
  const TypeConstraint& tc = func->params()[param.index].typeConstraint;
  assert(tc.hasConstraint());
  // Strict checking applies in Hack files / HipHop-syntax mode, unless the
  // caller opted into weak types.
  bool useStrictTypes =
    func->unit()->isHHFile() || RuntimeOption::EnableHipHopSyntax ||
    !vmfp()->useWeakTypes();
  if (!tc.isTypeVar() && !tc.isTypeConstant()) {
    tc.verifyParam(param.ptr, func, param.index, useStrictTypes);
  }
}
// Shared implementation of VerifyRetTypeC/V: check the value on top of the
// stack against the function's return type constraint, when return type
// checking is enabled.
OPTBLD_INLINE void implVerifyRetType() {
  if (LIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) {
    return;
  }

  const auto func = vmfp()->m_func;
  const auto tc = func->returnTypeConstraint();
  bool useStrictTypes = func->unit()->useStrictTypes();
  if (!tc.isTypeVar() && !tc.isTypeConstant()) {
    tc.verifyReturn(vmStack().topTV(), func, useStrictTypes);
  }
}
// VerifyRetTypeC: verify the cell return value on top of the stack.
OPTBLD_INLINE void iopVerifyRetTypeC() {
  implVerifyRetType();
}
// VerifyRetTypeV: verify the ref return value on top of the stack.
OPTBLD_INLINE void iopVerifyRetTypeV() {
  implVerifyRetType();
}
// NativeImpl: invoke the builtin C++ implementation of the current function,
// then tear down the frame and return control to the caller.
OPTBLD_INLINE TCA iopNativeImpl(PC& pc) {
  auto const jitReturn = jitReturnPre(vmfp());

  BuiltinFunction func = vmfp()->func()->builtinFuncPtr();
  assert(func);
  // Actually call the native implementation. This will handle freeing the
  // locals in the normal case. In the case of an exception, the VM unwinder
  // will take care of it.
  func(vmfp());

  // Grab caller info from ActRec.
  ActRec* sfp = vmfp()->sfp();
  Offset soff = vmfp()->m_soff;

  // Adjust the stack; the native implementation put the return value in the
  // right place for us already
  vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
  vmStack().ret();

  // Return control to the caller.
  vmfp() = sfp;
  pc = LIKELY(vmfp() != nullptr) ? vmfp()->func()->getEntry() + soff : nullptr;
  return jitReturnPost(jitReturn);
}
// Self: write the current context class into the class-ref slot; fatals
// when there is no class context.
OPTBLD_INLINE void iopSelf(clsref_slot slot) {
  Class* clss = arGetContextClass(vmfp());
  if (!clss) {
    raise_error(HPHP::Strings::CANT_ACCESS_SELF);
  }
  slot.put(clss);
}
// Parent: write the parent of the current context class into the class-ref
// slot; fatals when there is no class context or no parent.
OPTBLD_INLINE void iopParent(clsref_slot slot) {
  Class* clss = arGetContextClass(vmfp());
  if (!clss) {
    raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
  }
  Class* parent = clss->parent();
  if (!parent) {
    raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
  }
  slot.put(parent);
}
// CreateCl: instantiate the closure whose pre-class index is `clsIx`,
// rescoped to the current class context, capturing `numArgs` cells from the
// stack as use-variables. The captured cells are consumed and the closure
// object is pushed (reference transferred, no extra incref).
OPTBLD_INLINE void iopCreateCl(intva_t numArgs, intva_t clsIx) {
  auto const func = vmfp()->m_func;
  auto const preCls = func->unit()->lookupPreClassId(clsIx);
  auto const c = Unit::defClosure(preCls);

  auto const cls = c->rescope(const_cast<Class*>(func->cls()));
  auto obj = newInstance(cls);
  c_Closure::fromObject(obj)->init(numArgs, vmfp(), vmStack().top());
  vmStack().ndiscard(numArgs);
  vmStack().pushObjectNoRc(obj);
}
// Return the frame's $this as a BaseGenerator*. The object must be either a
// Generator or an AsyncGenerator (asserted).
static inline BaseGenerator* this_base_generator(const ActRec* fp) {
  auto const obj = fp->getThis();
  assert(obj->getVMClass() == AsyncGenerator::getClass() ||
         obj->getVMClass() == Generator::getClass());
  return obj->getVMClass() == Generator::getClass()
    ? static_cast<BaseGenerator*>(Generator::fromObject(obj))
    : static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj));
}
// Return the frame's $this as a (non-async) Generator*.
static inline Generator* this_generator(const ActRec* fp) {
  auto const obj = fp->getThis();
  return Generator::fromObject(obj);
}
// Interned name of the implicit $this variable.
const StaticString s_this("this");
// CreateCont: suspend eager execution of a generator body. Allocates the
// {Async,}Generator object (moving locals/iterators into it), fires the
// suspend hook, frees the eager frame, leaves the generator object as the
// return value, and returns control to the caller.
OPTBLD_INLINE TCA iopCreateCont(PC& pc) {
  auto const jitReturn = jitReturnPre(vmfp());

  auto const fp = vmfp();
  auto const func = fp->func();
  auto const numSlots = func->numSlotsInFrame();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assert(!fp->resumed());
  assert(func->isGenerator());

  // Create the {Async,}Generator object. Create takes care of copying local
  // variables and iterators.
  auto const obj = func->isAsync()
    ? AsyncGenerator::Create(fp, numSlots, nullptr, resumeOffset)
    : Generator::Create<false>(fp, numSlots, nullptr, resumeOffset);

  auto const genData = func->isAsync() ?
    static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj)) :
    static_cast<BaseGenerator*>(Generator::fromObject(obj));

  EventHook::FunctionSuspendE(fp, genData->actRec());

  // Grab caller info from ActRec.
  ActRec* sfp = fp->sfp();
  Offset soff = fp->m_soff;

  // Free ActRec and store the return value.
  vmStack().ndiscard(numSlots);
  vmStack().ret();
  tvCopy(make_tv<KindOfObject>(obj), *vmStack().topTV());
  assert(vmStack().topTV() == fp->retSlot());

  // Return control to the caller.
  vmfp() = sfp;
  pc = LIKELY(sfp != nullptr) ? sfp->func()->getEntry() + soff : nullptr;
  return jitReturnPost(jitReturn);
}
// Link the generator's ActRec under the current frame and move vmfp/pc to
// the generator's resume point. The return stub installed depends on whether
// the generator is async.
OPTBLD_INLINE void moveProgramCounterIntoGenerator(PC &pc, BaseGenerator* gen) {
  assert(gen->isRunning());
  ActRec* genAR = gen->actRec();
  genAR->setReturn(vmfp(), pc, genAR->func()->isAsync() ?
    jit::tc::ustubs().asyncGenRetHelper :
    jit::tc::ustubs().genRetHelper);

  vmfp() = genAR;

  assert(genAR->func()->contains(gen->resumable()->resumeOffset()));
  pc = genAR->func()->unit()->at(gen->resumable()->resumeOffset());
  vmpc() = pc;
}
// True iff the TypedValue holds a (non-async) Generator object.
OPTBLD_INLINE bool tvIsGenerator(TypedValue tv) {
  return tv.m_type == KindOfObject &&
         tv.m_data.pobj->instanceof(Generator::getClass());
}
// Shared body of ContEnter/ContRaise: transfer control into the generator
// that is the current frame's $this, then fire the resume hook.
template<bool recursive>
OPTBLD_INLINE void contEnterImpl(PC& pc) {

  // The stack must have one cell! Or else resumableStackBase() won't work!
  assert(vmStack().top() + 1 ==
         (TypedValue*)vmfp() - vmfp()->m_func->numSlotsInFrame());

  // Do linkage of the generator's AR.
  assert(vmfp()->hasThis());
  // `recursive` determines whether we enter just the top generator or whether
  // we drop down to the lowest running delegate generator. This is useful for
  // ContRaise, which should throw from the context of the lowest generator.
  if (!recursive || vmfp()->getThis()->getVMClass() != Generator::getClass()) {
    moveProgramCounterIntoGenerator(pc, this_base_generator(vmfp()));
  } else {
    // TODO(https://github.com/facebook/hhvm/issues/6040)
    // Implement throwing from delegate generators.
    assert(vmfp()->getThis()->getVMClass() == Generator::getClass());
    auto gen = this_generator(vmfp());
    if (gen->m_delegate.m_type != KindOfNull) {
      SystemLib::throwExceptionObject("Throwing from a delegate generator is "
          "not currently supported in HHVM");
    }
    moveProgramCounterIntoGenerator(pc, gen);
  }

  EventHook::FunctionResumeYield(vmfp());
}
// ContEnter: resume the generator (non-recursive entry).
OPTBLD_INLINE void iopContEnter(PC& pc) {
  contEnterImpl<false>(pc);
}
// ContRaise: resume the generator (descending into delegates) and then
// throw the exception on top of the stack from inside it.
OPTBLD_INLINE void iopContRaise(PC& pc) {
  contEnterImpl<true>(pc);
  iopThrow();
}
// Pop back to the frame that called into this resumable and point `pc` at
// its return offset (nullptr when there is no caller, i.e. scheduler entry).
OPTBLD_INLINE void moveProgramCounterToCaller(PC& pc) {
  auto fp = vmfp();
  // Grab caller info from ActRec.
  ActRec* sfp = fp->sfp();
  Offset soff = fp->m_soff;

  // Return control to the next()/send()/raise() caller.
  vmfp() = sfp;
  pc = sfp != nullptr ? sfp->func()->getEntry() + soff : nullptr;
}
// Shared body of Yield/YieldK: suspend the resumed generator, recording the
// optional key and the value, then return control to the caller/scheduler.
OPTBLD_INLINE TCA yield(PC& pc, const Cell* key, const Cell value) {
  auto const jitReturn = jitReturnPre(vmfp());

  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assert(fp->resumed());
  assert(func->isGenerator());

  EventHook::FunctionSuspendR(fp, nullptr);

  if (!func->isAsync()) {
    // Non-async generator.
    assert(fp->sfp());
    frame_generator(fp)->yield(resumeOffset, key, value);

    // Push return value of next()/send()/raise().
    vmStack().pushNull();
  } else {
    // Async generator.
    auto const gen = frame_async_generator(fp);
    auto const eagerResult = gen->yield(resumeOffset, key, value);
    if (eagerResult) {
      // Eager execution => return StaticWaitHandle.
      assert(fp->sfp());
      vmStack().pushObjectNoRc(eagerResult);
    } else {
      // Resumed execution => return control to the scheduler.
      assert(!fp->sfp());
    }
  }

  moveProgramCounterToCaller(pc);

  return jitReturnPost(jitReturn);
}
// Yield: yield the value on top of the stack (no key).
OPTBLD_INLINE TCA iopYield(PC& pc) {
  auto const value = *vmStack().topC();
  vmStack().discard();
  return yield(pc, nullptr, value);
}
// YieldK: yield the key/value pair on top of the stack (key below value).
OPTBLD_INLINE TCA iopYieldK(PC& pc) {
  auto const key = *vmStack().indC(1);
  auto const value = *vmStack().topC();
  vmStack().ndiscard(2);
  return yield(pc, &key, value);
}
// True iff a value of this type can be the target of `yield from`
// (arrays and objects; objects are validated further downstream).
OPTBLD_INLINE bool typeIsValidGeneratorDelegate(DataType type) {
  return type == KindOfArray ||
         type == KindOfPersistentArray ||
         type == KindOfObject;
}
// ContAssignDelegate: set up the current generator's `yield from` target
// from the cell on top of the stack (a generator, array, or Traversable).
OPTBLD_INLINE void iopContAssignDelegate(Iter* iter) {
  auto param = *vmStack().topC();
  vmStack().discard();
  auto gen = frame_generator(vmfp());
  if (UNLIKELY(!typeIsValidGeneratorDelegate(param.m_type))) {
    tvRefcountedDecRef(param);
    SystemLib::throwErrorObject(
      "Can use \"yield from\" only with arrays and Traversables"
    );
  }

  // We don't use the iterator if we have a delegate generator (as iterators
  // mess with the internal state of the generator), so short circuit and dont
  // init our iterator in that case. Otherwise, if we init our iterator and it
  // returns false then we know that we have an empty iterator (like `[]`) in
  // which case just set our delegate to Null so that ContEnterDelegate and
  // YieldFromDelegate know something is up.
  if (tvIsGenerator(param) || iter->init(&param)) {
    cellSet(param, gen->m_delegate);
  } else {
    cellSetNull(gen->m_delegate);
  }
  // When using a subgenerator we don't actually read the values of the m_key
  // and m_value of our frame generator (the delegating generator). The
  // generator itself is still holding a reference to them though, so null
  // out the key/value to free the memory.
  cellSetNull(gen->m_key);
  cellSetNull(gen->m_value);
}
// ContEnterDelegate: if the current generator is delegating to another
// generator, transfer control into that delegate; iterators (array
// delegates) are handled by YieldFromDelegate instead.
OPTBLD_INLINE void iopContEnterDelegate(PC& pc) {
  // Make sure we have a delegate
  auto gen = frame_generator(vmfp());

  // Ignore the VM Stack, we want to pass that down from ContEnter

  // ContEnterDelegate doesn't do anything for iterators.
  if (!tvIsGenerator(gen->m_delegate)) {
    return;
  }

  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If our generator finished earlier (or if there was nothing to do) just
    // continue on and let YieldFromDelegate handle cleaning up.
    return;
  }

  // A pretty odd if statement, but consider the following situation.
  // Generators A and B both do `yield from` on a shared delegate generator,
  // C. When A is first used we autoprime it, and therefore also autoprime C as
  // well. Then we also autoprime B when it gets used, which advances C past
  // some perfectly valid data.
  // Basically this check is to make sure that we autoprime delegate generators
  // when needed, and not if they're shared.
  if (gen->getState() == BaseGenerator::State::Priming &&
      delegate->getState() != BaseGenerator::State::Created) {
    return;
  }

  // We're about to resume executing our generator, so make sure we're in the
  // right state.
  delegate->preNext(false);

  moveProgramCounterIntoGenerator(pc, delegate);
  EventHook::FunctionResumeYield(vmfp());
}
// Yield-from where the delegate is another generator: either copy the
// finished delegate's return value onto the stack, or suspend this
// generator at `resumeOffset` and hand control back to the caller.
OPTBLD_INLINE
TCA yieldFromGenerator(PC& pc, Generator* gen, Offset resumeOffset) {
  auto fp = vmfp();

  assert(tvIsGenerator(gen->m_delegate));
  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If the generator is done, just copy the return value onto the stack.
    cellDup(delegate->m_value, *vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  EventHook::FunctionSuspendR(fp, nullptr);
  // We don't actually want to "yield" anything here. The implementation of
  // key/current are smart enough to dive into our delegate generator, so
  // really what we want to do is clean up all of the generator metadata
  // (state, ressume address, etc) and continue on.
  assert(gen->isRunning());
  gen->resumable()->setResumeAddr(nullptr, resumeOffset);
  gen->setState(BaseGenerator::State::Started);

  moveProgramCounterToCaller(pc);

  return jitReturnPost(jitReturn);
}
// Yield-from where the delegate is an array iterator: yield the current
// key/value from the iterator and advance it, or push null when the
// iteration is exhausted (or the delegate was cleared).
OPTBLD_INLINE
TCA yieldFromIterator(PC& pc, Generator* gen, Iter* it, Offset resumeOffset) {
  auto fp = vmfp();

  // For the most part this should never happen, the emitter assigns our
  // delegate to a non-null value in ContAssignDelegate. The one exception to
  // this is if we are given an empty iterator, in which case
  // ContAssignDelegate will remove our delegate and just send us to
  // YieldFromDelegate to return our null.
  if (UNLIKELY(gen->m_delegate.m_type == KindOfNull)) {
    tvWriteNull(vmStack().topTV());
    return nullptr;
  }

  // Otherwise, if iteration is finished we just return null.
  auto arr = it->arr();
  if (arr.end()) {
    // Push our null return value onto the stack
    tvWriteNull(vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  EventHook::FunctionSuspendR(fp, nullptr);
  auto key = *(arr.first().asTypedValue());
  auto value = *(arr.second().asTypedValue());
  gen->yield(resumeOffset, &key, value);

  moveProgramCounterToCaller(pc);

  it->next();

  return jitReturnPost(jitReturn);
}
// YieldFromDelegate: dispatch a `yield from` step to either the generator
// or the iterator implementation depending on the delegate's type.
OPTBLD_INLINE TCA iopYieldFromDelegate(PC& pc, Iter* it, PC resumePc) {
  auto gen = frame_generator(vmfp());
  auto func = vmfp()->func();
  auto resumeOffset = func->unit()->offsetOf(resumePc);
  if (tvIsGenerator(gen->m_delegate)) {
    return yieldFromGenerator(pc, gen, resumeOffset);
  }
  return yieldFromIterator(pc, gen, it, resumeOffset);
}
// ContUnsetDelegate: clear the generator's `yield from` delegate, optionally
// freeing the iterator when requested and applicable.
OPTBLD_INLINE void iopContUnsetDelegate(intva_t shouldFreeIter, Iter* iter) {
  auto gen = frame_generator(vmfp());
  // The `shouldFreeIter` immediate determines whether we need to call free
  // on our iterator or not. Normally if we finish executing our yield from
  // successfully then the implementation of `next` will automatically do it
  // for us when there aren't any elements left, but if an exception is thrown
  // then we need to do it manually. We don't use the iterator when the
  // delegate is a generator though, so even if the param tells us to free it
  // we should just ignore it.
  if (UNLIKELY(shouldFreeIter && !tvIsGenerator(gen->m_delegate))) {
    iter->free();
  }
  cellSetNull(gen->m_delegate);
}
// ContCheck: validate that the generator may be resumed; `checkStarted`
// additionally requires that it has already been started.
OPTBLD_INLINE void iopContCheck(intva_t checkStarted) {
  this_base_generator(vmfp())->preNext(checkStarted);
}
// ContValid: push whether the generator is not yet finished.
OPTBLD_INLINE void iopContValid() {
  vmStack().pushBool(
    this_generator(vmfp())->getState() != BaseGenerator::State::Done);
}
// ContStarted: push whether the generator has been started at least once.
OPTBLD_INLINE void iopContStarted() {
  vmStack().pushBool(
    this_generator(vmfp())->getState() != BaseGenerator::State::Created);
}
// Follow the chain of generator delegates down to the innermost generator
// currently being delegated to (returns `gen` itself when not delegating).
OPTBLD_INLINE Generator *currentlyDelegatedGenerator(Generator *gen) {
  while (tvIsGenerator(gen->m_delegate)) {
    gen = Generator::fromObject(gen->m_delegate.m_data.pobj);
  }
  return gen;
}
// ContKey: push the current yield key of the generator (or of its
// innermost delegate when delegating).
OPTBLD_INLINE void iopContKey() {
  Generator* cont = this_generator(vmfp());
  if (!RuntimeOption::AutoprimeGenerators) cont->startedCheck();

  // If we are currently delegating to a generator, return its key instead
  cont = currentlyDelegatedGenerator(cont);

  cellDup(cont->m_key, *vmStack().allocC());
}
// ContCurrent: push the current yield value of the generator (or of its
// innermost delegate); null when the generator is already done.
OPTBLD_INLINE void iopContCurrent() {
  Generator* cont = this_generator(vmfp());
  if (!RuntimeOption::AutoprimeGenerators) cont->startedCheck();

  // If we are currently delegating to a generator, return its value instead
  cont = currentlyDelegatedGenerator(cont);

  if (cont->getState() == BaseGenerator::State::Done) {
    vmStack().pushNull();
  } else {
    cellDup(cont->m_value, *vmStack().allocC());
  }
}
// ContGetReturn: push the generator's return value; throws if the generator
// has not finished successfully.
OPTBLD_INLINE void iopContGetReturn() {
  Generator* cont = this_generator(vmfp());
  if (!RuntimeOption::AutoprimeGenerators) cont->startedCheck();

  if (!cont->successfullyFinishedExecuting()) {
    SystemLib::throwExceptionObject("Cannot get return value of a generator "
                                    "that hasn't returned");
  }

  cellDup(cont->m_value, *vmStack().allocC());
}
// Suspend an eagerly-executing async function: wrap the frame in a new
// AsyncFunctionWaitHandle blocked on the child on top of the stack, leave
// the wait handle as the return value, and return control to the caller.
OPTBLD_INLINE void asyncSuspendE(PC& pc) {
  assert(!vmfp()->resumed());
  assert(vmfp()->func()->isAsyncFunction());
  const auto func = vmfp()->m_func;
  const auto resumeOffset = func->unit()->offsetOf(pc);

  // Pop the blocked dependency.
  Cell* value = vmStack().topC();
  assert(value->m_type == KindOfObject);
  assert(value->m_data.pobj->instanceof(c_WaitableWaitHandle::classof()));

  auto child = static_cast<c_WaitableWaitHandle*>(value->m_data.pobj);
  assert(!child->isFinished());
  vmStack().discard();

  // Create the AsyncFunctionWaitHandle object. Create takes care of
  // copying local variables and itertors.
  auto waitHandle = static_cast<c_AsyncFunctionWaitHandle*>(
    c_AsyncFunctionWaitHandle::Create<true>(vmfp(),
                                            vmfp()->func()->numSlotsInFrame(),
                                            nullptr, resumeOffset, child));

  // Call the FunctionSuspend hook. FunctionSuspend will decref the newly
  // allocated waitHandle if it throws.
  EventHook::FunctionSuspendE(vmfp(), waitHandle->actRec());

  // Grab caller info from ActRec.
  ActRec* sfp = vmfp()->sfp();
  Offset soff = vmfp()->m_soff;

  // Free ActRec and store the return value.
  vmStack().ndiscard(vmfp()->m_func->numSlotsInFrame());
  vmStack().ret();
  tvCopy(make_tv<KindOfObject>(waitHandle), *vmStack().topTV());
  assert(vmStack().topTV() == vmfp()->retSlot());
  // In case we were called by a jitted FCallAwait, let it know
  // that we suspended.
  vmStack().topTV()->m_aux.u_fcallAwaitFlag = 1;
  // Return control to the caller.
  vmfp() = sfp;
  pc = LIKELY(vmfp() != nullptr) ?
    vmfp()->func()->getEntry() + soff : nullptr;
}
// Suspend a resumed async function or async generator: record the await of
// the child wait handle on top of the stack and return control to the
// caller (or the scheduler, when there is no caller frame).
OPTBLD_INLINE void asyncSuspendR(PC& pc) {
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assert(fp->resumed());
  assert(func->isAsync());

  // Obtain child
  Cell& value = *vmStack().topC();
  assert(value.m_type == KindOfObject);
  assert(value.m_data.pobj->instanceof(c_WaitableWaitHandle::classof()));
  auto const child = static_cast<c_WaitableWaitHandle*>(value.m_data.pobj);

  // Before adjusting the stack or doing anything, check the suspend hook.
  // This can throw.
  EventHook::FunctionSuspendR(fp, child);

  // Await child and suspend the async function/generator. May throw.
  if (!func->isGenerator()) {
    // Async function.
    assert(!fp->sfp());
    frame_afwh(fp)->await(resumeOffset, child);
    vmStack().discard();
  } else {
    // Async generator.
    auto const gen = frame_async_generator(fp);
    auto const eagerResult = gen->await(resumeOffset, child);
    vmStack().discard();
    if (eagerResult) {
      // Eager execution => return AsyncGeneratorWaitHandle.
      assert(fp->sfp());
      vmStack().pushObjectNoRc(eagerResult);
    } else {
      // Resumed execution => return control to the scheduler.
      assert(!fp->sfp());
    }
  }

  // Grab caller info from ActRec.
  ActRec* sfp = fp->sfp();
  Offset soff = fp->m_soff;

  // Return control to the caller or scheduler.
  vmfp() = sfp;
  pc = sfp != nullptr ? sfp->func()->getEntry() + soff : nullptr;
}
// Await: await the awaitable on top of the stack. Non-WaitHandle objects
// are converted via their getWaitHandle() method; failed handles rethrow,
// succeeded handles leave their result, pending handles suspend the frame.
OPTBLD_INLINE TCA iopAwait(PC& pc) {
  auto const awaitable = vmStack().topC();
  auto wh = c_WaitHandle::fromCell(awaitable);
  if (UNLIKELY(wh == nullptr)) {
    if (LIKELY(awaitable->m_type == KindOfObject)) {
      auto const obj = awaitable->m_data.pobj;
      auto const cls = obj->getVMClass();
      auto const func = cls->lookupMethod(s_getWaitHandle.get());
      if (func && !(func->attrs() & AttrStatic)) {
        auto ret = Variant::attach(
          g_context->invokeFuncFew(func, obj, nullptr, 0, nullptr)
        );
        cellSet(*tvToCell(ret.asTypedValue()), *vmStack().topC());
        wh = c_WaitHandle::fromCell(vmStack().topC());
      }
    }
    if (UNLIKELY(wh == nullptr)) {
      SystemLib::throwBadMethodCallExceptionObject("Await on a non-WaitHandle");
    }
  }
  // NOTE(review): failure is annotated LIKELY here — presumably because the
  // JIT handles the common succeeded/pending cases inline and punts the
  // failed case to the interpreter (cf. the comment in iopWHResult); confirm.
  if (LIKELY(wh->isFailed())) {
    throw req::root<Object>{wh->getException()};
  }
  if (wh->isSucceeded()) {
    cellSet(wh->getResult(), *vmStack().topC());
    return nullptr;
  }
  return suspendStack(pc);
}
// Suspend the entire chain of eagerly-executing frames on the VM stack:
// resumed frames suspend once; eager frames suspend and, when entered via
// FCallAwait, the loop continues with the caller frame.
TCA suspendStack(PC &pc) {
  while (true) {
    auto const jitReturn = [&] {
      try {
        return jitReturnPre(vmfp());
      } catch (VMSwitchMode&) {
        vmpc() = pc;
        throw VMSuspendStack();
      } catch (...) {
        // We're halfway through a bytecode; we can't recover
        always_assert(false);
      }
    }();

    if (vmfp()->resumed()) {
      // suspend resumed execution
      asyncSuspendR(pc);
      return jitReturnPost(jitReturn);
    }

    auto const suspendOuter = vmfp()->isFCallAwait();
    assertx(jitReturn.sfp || !suspendOuter);

    // suspend eager execution
    asyncSuspendE(pc);

    auto retIp = jitReturnPost(jitReturn);
    if (!suspendOuter) return retIp;
    if (retIp) {
      auto const& us = jit::tc::ustubs();
      if (retIp == us.resumeHelper) retIp = us.fcallAwaitSuspendHelper;
      return retIp;
    }
    vmpc() = pc;
  }
}
// WHResult: extract the result of the wait handle on top of the stack;
// rethrows on failure, throws InvalidOperation when still pending.
OPTBLD_INLINE void iopWHResult() {
  // we should never emit this bytecode for non-waithandle
  auto const wh = c_WaitHandle::fromCellAssert(vmStack().topC());
  // the failure condition is likely since we punt to this opcode
  // in the JIT when the state is failed.
  if (wh->isFailed()) {
    throw_object(Object{wh->getException()});
  }
  if (wh->isSucceeded()) {
    cellSet(wh->getResult(), *vmStack().topC());
    return;
  }
  SystemLib::throwInvalidOperationExceptionObject(
    "Request for result on pending wait handle, "
    "must await or join() before calling result()");
  not_reached();
}
// CheckProp: push whether the declared property `propName` of the current
// class has already been initialized (used by 86pinit-style initializers).
OPTBLD_INLINE void iopCheckProp(const StringData* propName) {
  auto* cls = vmfp()->getClass();
  auto* propVec = cls->getPropData();
  always_assert(propVec);

  auto* ctx = arGetContextClass(vmfp());
  auto idx = ctx->lookupDeclProp(propName);

  auto& tv = (*propVec)[idx];
  vmStack().pushBool(tv.m_type != KindOfUninit);
}
// InitProp: initialize the static or non-static property `propName` of the
// current class from the cell on top of the stack; the cell is consumed.
OPTBLD_INLINE void iopInitProp(const StringData* propName, InitPropOp propOp) {
  auto* cls = vmfp()->getClass();
  TypedValue* tv;

  auto* ctx = arGetContextClass(vmfp());
  auto* fr = vmStack().topC();

  switch (propOp) {
    case InitPropOp::Static:
      tv = cls->getSPropData(ctx->lookupSProp(propName));
      break;

    case InitPropOp::NonStatic: {
      auto* propVec = cls->getPropData();
      always_assert(propVec);
      Slot idx = ctx->lookupDeclProp(propName);
      tv = &(*propVec)[idx];
    } break;
  }

  cellDup(*fr, *tvToCell(tv));
  vmStack().popC();
}
// IncStat: bump the stats counter `counter` by `value`.
OPTBLD_INLINE void iopIncStat(intva_t counter, intva_t value) {
  Stats::inc(Stats::StatCounter(counter.n), value);
}
// OODeclExists: pop an autoload flag and a name, and push whether a
// class/interface/trait (per subop) with that name exists.
OPTBLD_INLINE void iopOODeclExists(OODeclExistsOp subop) {
  TypedValue* aloadTV = vmStack().topTV();
  tvCastToBooleanInPlace(aloadTV);
  assert(aloadTV->m_type == KindOfBoolean);
  bool autoload = aloadTV->m_data.num;
  vmStack().popX();

  TypedValue* name = vmStack().topTV();
  tvCastToStringInPlace(name);
  assert(isStringType(name->m_type));

  ClassKind kind;
  switch (subop) {
    case OODeclExistsOp::Class : kind = ClassKind::Class; break;
    case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
    case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
  }
  // Overwrite the name cell in place with the boolean result.
  tvAsVariant(name) = Unit::classExists(name->m_data.pstr, autoload, kind);
}
// Silence: implement the @ error-suppression operator. Start stashes the
// current error level in local `loc` and zeroes it; End restores it.
OPTBLD_INLINE void iopSilence(local_var loc, SilenceOp subop) {
  switch (subop) {
    case SilenceOp::Start:
      loc.ptr->m_type = KindOfInt64;
      loc.ptr->m_data.num = zero_error_level();
      break;
    case SilenceOp::End:
      assert(loc.ptr->m_type == KindOfInt64);
      restore_error_level(loc.ptr->m_data.num);
      break;
  }
}
// Render the current VM stack as a human-readable string (debugging aid),
// each line prefixed with `prefix`.
std::string prettyStack(const std::string& prefix) {
  if (!vmfp()) return "__Halted";
  int offset = (vmfp()->m_func->unit() != nullptr)
               ? pcOff() : 0;
  auto begPrefix = prefix + "__";
  auto midPrefix = prefix + "|| ";
  auto endPrefix = prefix + "\\/";
  auto stack = vmStack().toString(vmfp(), offset, midPrefix);
  return begPrefix + "\n" + stack + endPrefix;
}
// callable from gdb
// Print the current VM stack to stderr.
void DumpStack() {
  fprintf(stderr, "%s\n", prettyStack("").c_str());
}
// callable from gdb
// Print the bytecode of the unit `skip` frames up from the current frame.
void DumpCurUnit(int skip) {
  ActRec* fp = vmfp();
  Offset pc = fp->m_func->unit() ? pcOff() : 0;
  while (skip--) {
    fp = g_context->getPrevVMState(fp, &pc);
  }
  if (fp == nullptr) {
    std::cout << "Don't have a valid fp\n";
    return;
  }

  printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
  Unit* u = fp->m_func->unit();
  if (u == nullptr) {
    std::cout << "Current unit is NULL\n";
    return;
  }
  printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
  std::cout << u->toString();
}
// callable from gdb
// Print the translation-cache address and source location that called into
// the current VM frame.
void PrintTCCallerInfo() {
  VMRegAnchor _;

  auto const u = vmfp()->m_func->unit();
  auto const rip = []() -> jit::TCA {
    DECLARE_FRAME_POINTER(reg_fp);
    // NB: We can't directly mutate the register-mapped `reg_fp'.
    for (ActRec* fp = reg_fp; fp; fp = fp->m_sfp) {
      auto const rip = jit::TCA(fp->m_savedRip);
      if (jit::tc::isValidCodeAddress(rip)) return rip;
    }
    return nullptr;
  }();

  fprintf(stderr, "Called from TC address %p\n", rip);
  std::cerr << u->filepath()->data() << ':'
            << u->getLineNumber(u->offsetOf(vmpc())) << '\n';
}
// thread-local cached coverage info
// Last unit/line recorded, used to avoid re-recording the same line.
static __thread Unit* s_prev_unit;
static __thread int s_prev_line;
// Record code coverage for the line at the current pc, skipping systemlib
// units and deduplicating consecutive hits on the same unit/line.
void recordCodeCoverage(PC pc) {
  Unit* unit = vmfp()->m_func->unit();
  assert(unit != nullptr);
  if (unit == SystemLib::s_nativeFuncUnit ||
      unit == SystemLib::s_nativeClassUnit ||
      unit == SystemLib::s_hhas_unit) {
    return;
  }
  int line = unit->getLineNumber(pcOff());
  assert(line != -1);

  if (unit != s_prev_unit || line != s_prev_line) {
    s_prev_unit = unit;
    s_prev_line = line;
    const StringData* filepath = unit->filepath();
    assert(filepath->isStatic());
    TI().m_coverage->Record(filepath->data(), line, line);
  }
}
// Reset the per-thread coverage dedup cache.
void resetCoverageCounters() {
  s_prev_line = -1;
  s_prev_unit = nullptr;
}
// Trace a separator line (at trace level 3) labeled with the opcode name.
static inline void
condStackTraceSep(Op opcode) {
  TRACE(3, "%s "
        "========================================"
        "========================================\n",
        opcodeToName(opcode));
}
// Trace the pretty-printed VM stack (at trace level 3) with prefix `pfx`.
#define COND_STACKTRACE(pfx)\
  ONTRACE(3, auto stack = prettyStack(pfx);\
          Trace::trace("%s\n", stack.c_str());)
/*
 * iopWrapper is used to normalize the calling convention for the iop*
 * functions, since some return void and some return TCA. Any functions that
 * return void are treated as though they returned nullptr.
 *
 * Each overload decodes the opcode's immediates from `pc` in signature
 * order, invokes the handler, and returns its TCA (or nullptr for void
 * handlers).
 */
OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(PC&), PC& pc) {
  fn(pc);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, TCA(*fn)(PC& pc), PC& pc) {
  return fn(pc);
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(), PC& pc) {
  fn();
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(const StringData*), PC& pc) {
  auto s = decode_litstr(pc);
  fn(s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(const StringData*,const StringData*), PC& pc) {
  auto s1 = decode_litstr(pc);
  auto s2 = decode_litstr(pc);
  fn(s1, s2);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(const StringData*,clsref_slot), PC& pc) {
  auto s1 = decode_litstr(pc);
  auto s = decode_clsref_slot(pc);
  fn(s1, s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(local_var), PC& pc) {
  auto var = decode_local(pc);
  fn(var);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(local_var,local_var), PC& pc) {
  auto var1 = decode_local(pc);
  auto var2 = decode_local(pc);
  fn(var1, var2);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(local_var,clsref_slot), PC& pc) {
  auto var1 = decode_local(pc);
  auto s = decode_clsref_slot(pc);
  fn(var1, s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(intva_t), PC& pc) {
  auto n = decode_intva(pc);
  fn(n);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(intva_t,intva_t), PC& pc) {
  auto n1 = decode_intva(pc);
  auto n2 = decode_intva(pc);
  fn(n1, n2);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(intva_t,clsref_slot), PC& pc) {
  auto n1 = decode_intva(pc);
  auto s = decode_clsref_slot(pc);
  fn(n1, s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(intva_t,LocalRange), PC& pc) {
  auto n1 = decode_intva(pc);
  auto n2 = decodeLocalRange(pc);
  fn(n1, n2);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(clsref_slot), PC& pc) {
  auto s = decode_clsref_slot(pc);
  fn(s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(int32_t), PC& pc) {
  auto n = decode<int32_t>(pc);
  fn(n);
  return nullptr;
}

// Immediate is a length-prefixed int32 vector; advance pc past the payload.
OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(int32_t,imm_array<int32_t>), PC& pc) {
  auto n = decode<int32_t>(pc);
  auto v = imm_array<int32_t>(pc);
  pc += n * sizeof(int32_t);
  fn(n, v);
  return nullptr;
}

// The ActRec* overloads locate the pre-live ActRec from the instruction start.
OPTBLD_INLINE static
TCA iopWrapper(Op op, void(*fn)(ActRec*,intva_t,intva_t), PC& pc) {
  auto ar = arFromInstr(pc - encoded_op_size(op));
  auto n1 = decode_intva(pc);
  auto n2 = decode_intva(pc);
  fn(ar, n1, n2);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op op, void(*fn)(ActRec*,intva_t,local_var), PC& pc) {
  auto ar = arFromInstr(pc - encoded_op_size(op));
  auto n = decode_intva(pc);
  auto var = decode_local(pc);
  fn(ar, n, var);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op op, void(*fn)(intva_t,local_var), PC& pc) {
  auto n = decode_intva(pc);
  auto var = decode_local(pc);
  fn(n, var);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op op, void(*fn)(PC&,intva_t), PC& pc) {
  auto n = decode_intva(pc);
  fn(pc, n);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op op, void(*fn)(intva_t,const StringData*,Id), PC& pc) {
  auto n = decode_intva(pc);
  auto s = decode_litstr(pc);
  auto i = decode<Id>(pc);
  fn(n, s, i);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(int64_t), PC& pc) {
  auto imm = decode<int64_t>(pc);
  fn(imm);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(double), PC& pc) {
  auto imm = decode<double>(pc);
  fn(imm);
  return nullptr;
}

// Subop (enum) immediates are decoded with decode_oa<...>.
OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(FatalOp), PC& pc) {
  auto imm = decode_oa<FatalOp>(pc);
  fn(imm);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(IsTypeOp), PC& pc) {
  auto subop = decode_oa<IsTypeOp>(pc);
  fn(subop);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(local_var,IsTypeOp), PC& pc) {
  auto var = decode_local(pc);
  auto subop = decode_oa<IsTypeOp>(pc);
  fn(var, subop);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(SetOpOp), PC& pc) {
  auto subop = decode_oa<SetOpOp>(pc);
  fn(subop);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(SetOpOp,clsref_slot), PC& pc) {
  auto subop = decode_oa<SetOpOp>(pc);
  auto s = decode_clsref_slot(pc);
  fn(subop, s);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(local_var,SetOpOp), PC& pc) {
  auto var = decode_local(pc);
  auto subop = decode_oa<SetOpOp>(pc);
  fn(var, subop);
  return nullptr;
}

OPTBLD_INLINE static
TCA iopWrapper(Op, void(*fn)(IncDecOp), PC& pc) {
  auto subop = decode_oa<IncDecOp>(pc);
  fn(subop);
  return nullptr;
}
6477 OPTBLD_INLINE static
6478 TCA iopWrapper(Op, void(*fn)(IncDecOp,clsref_slot), PC& pc) {
6479 auto subop = decode_oa<IncDecOp>(pc);
6480 auto s = decode_clsref_slot(pc);
6481 fn(subop, s);
6482 return nullptr;
6485 OPTBLD_INLINE static
6486 TCA iopWrapper(Op, void(*fn)(local_var,IncDecOp), PC& pc) {
6487 auto var = decode_local(pc);
6488 auto subop = decode_oa<IncDecOp>(pc);
6489 fn(var, subop);
6490 return nullptr;
6493 OPTBLD_INLINE static
6494 TCA iopWrapper(Op, void(*fn)(BareThisOp), PC& pc) {
6495 auto subop = decode_oa<BareThisOp>(pc);
6496 fn(subop);
6497 return nullptr;
6500 OPTBLD_INLINE static
6501 TCA iopWrapper(Op, void(*fn)(OODeclExistsOp), PC& pc) {
6502 auto subop = decode_oa<OODeclExistsOp>(pc);
6503 fn(subop);
6504 return nullptr;
6507 OPTBLD_INLINE static
6508 TCA iopWrapper(Op, void(*fn)(local_var,SilenceOp), PC& pc) {
6509 auto var = decode_local(pc);
6510 auto subop = decode_oa<SilenceOp>(pc);
6511 fn(var, subop);
6512 return nullptr;
6515 OPTBLD_INLINE static
6516 TCA iopWrapper(Op, void(*fn)(const ArrayData*), PC& pc) {
6517 auto id = decode<Id>(pc);
6518 auto a = vmfp()->m_func->unit()->lookupArrayId(id);
6519 fn(a);
6520 return nullptr;
6523 OPTBLD_INLINE static
6524 TCA iopWrapper(Op, void(*fn)(local_var,intva_t), PC& pc) {
6525 auto var1 = decode_local(pc);
6526 auto imm2 = decode_intva(pc);
6527 fn(var1, imm2);
6528 return nullptr;
6531 OPTBLD_INLINE static
6532 TCA iopWrapper(Op, void(*fn)(Iter*), PC& pc) {
6533 auto iter = decode_iter(pc);
6534 fn(iter);
6535 return nullptr;
6538 OPTBLD_INLINE static
6539 TCA iopWrapper(Op, void(*fn)(intva_t,Iter*), PC& pc) {
6540 auto n = decode_intva(pc);
6541 auto iter = decode_iter(pc);
6542 fn(n, iter);
6543 return nullptr;
6546 OPTBLD_INLINE static
6547 TCA iopWrapper(Op op, void(*fn)(PC&,Iter*,PC), PC& pc) {
6548 auto origpc = pc - encoded_op_size(op);
6549 auto iter = decode_iter(pc);
6550 auto targetpc = origpc + decode_ba(pc);
6551 fn(pc, iter, targetpc);
6552 return nullptr;
6555 OPTBLD_INLINE static
6556 TCA iopWrapper(Op op, void(*fn)(PC&,Iter*,PC,local_var), PC& pc) {
6557 auto origpc = pc - encoded_op_size(op);
6558 auto iter = decode_iter(pc);
6559 auto targetpc = origpc + decode_ba(pc);
6560 auto var = decode_local(pc);
6561 fn(pc, iter, targetpc, var);
6562 return nullptr;
6565 OPTBLD_INLINE static
6566 TCA iopWrapper(Op op, void(*fn)(PC&,Iter*,PC,local_var,local_var), PC& pc) {
6567 auto origpc = pc - encoded_op_size(op);
6568 auto iter = decode_iter(pc);
6569 auto targetpc = origpc + decode_ba(pc);
6570 auto var1 = decode_local(pc);
6571 auto var2 = decode_local(pc);
6572 fn(pc, iter, targetpc, var1, var2);
6573 return nullptr;
6576 OPTBLD_INLINE static
6577 TCA iopWrapper(Op op, TCA(*fn)(PC&,Iter*,PC), PC& pc) {
6578 auto origpc = pc - encoded_op_size(op);
6579 auto iter = decode_iter(pc);
6580 auto targetpc = origpc + decode_ba(pc);
6581 return fn(pc, iter, targetpc);
6584 OPTBLD_INLINE static TCA iopWrapper(Op op, void(*fn)(PC&,PC), PC& pc) {
6585 auto origpc = pc - encoded_op_size(op);
6586 auto targetpc = origpc + decode_ba(pc);
6587 fn(pc, targetpc);
6588 return nullptr;
6591 OPTBLD_INLINE static
6592 TCA iopWrapper(Op op, void(*fn)(PC&,PC,int,imm_array<IterBreakElem>),
6593 PC& pc) {
6594 auto origpc = pc - encoded_op_size(op);
6595 auto targetpc = origpc + decode_ba(pc);
6596 auto n = decode<int32_t>(pc);
6597 auto v = imm_array<IterBreakElem>(pc);
6598 pc += n * sizeof(IterBreakElem);
6599 fn(pc, targetpc, n, v);
6600 return nullptr;
6603 OPTBLD_INLINE static
6604 TCA iopWrapper(Op op, void(*fn)(intva_t,MOpMode), PC& pc) {
6605 auto n = decode_intva(pc);
6606 auto mode = decode<MOpMode>(pc);
6607 fn(n, mode);
6608 return nullptr;
6611 OPTBLD_INLINE static
6612 TCA iopWrapper(Op op, void(*fn)(local_var,MOpMode), PC& pc) {
6613 auto local = decode_local(pc);
6614 auto mode = decode<MOpMode>(pc);
6615 fn(local, mode);
6616 return nullptr;
6619 OPTBLD_INLINE static
6620 TCA iopWrapper(Op op, void(*fn)(const StringData*, int32_t), PC& pc) {
6621 auto str = decode_litstr(pc);
6622 auto n = decode<int32_t>(pc);
6623 fn(str, n);
6624 return nullptr;
6627 OPTBLD_INLINE static
6628 TCA iopWrapper(Op op, void(*fn)(MOpMode,MemberKey), PC& pc) {
6629 auto mode = decode_oa<MOpMode>(pc);
6630 auto mk = decode_member_key(pc, liveUnit());
6631 fn(mode, mk);
6632 return nullptr;
6635 OPTBLD_INLINE static
6636 TCA iopWrapper(Op op, void(*fn)(intva_t,MemberKey), PC& pc) {
6637 auto n = decode_intva(pc);
6638 auto mk = decode_member_key(pc, liveUnit());
6639 fn(n, mk);
6640 return nullptr;
6643 OPTBLD_INLINE static
6644 TCA iopWrapper(Op op, void(*fn)(ActRec*,intva_t,MemberKey), PC& pc) {
6645 auto ar = arFromInstr(pc - encoded_op_size(op));
6646 auto n = decode_intva(pc);
6647 auto mk = decode_member_key(pc, liveUnit());
6648 fn(ar, n, mk);
6649 return nullptr;
6652 OPTBLD_INLINE static
6653 TCA iopWrapper(Op, void(*fn)(intva_t,intva_t,MemberKey), PC& pc) {
6654 auto n1 = decode_intva(pc);
6655 auto n2 = decode_intva(pc);
6656 auto mk = decode_member_key(pc, liveUnit());
6657 fn(n1, n2, mk);
6658 return nullptr;
6661 OPTBLD_INLINE static
6662 TCA iopWrapper(Op op, void(*fn)(intva_t,QueryMOp,MemberKey), PC& pc) {
6663 auto n = decode_intva(pc);
6664 auto subop = decode_oa<QueryMOp>(pc);
6665 auto mk = decode_member_key(pc, liveUnit());
6666 fn(n, subop, mk);
6667 return nullptr;
6670 OPTBLD_INLINE static
6671 TCA iopWrapper(Op op, void(*fn)(intva_t,IncDecOp,MemberKey), PC& pc) {
6672 auto n = decode_intva(pc);
6673 auto subop = decode_oa<IncDecOp>(pc);
6674 auto mk = decode_member_key(pc, liveUnit());
6675 fn(n, subop, mk);
6676 return nullptr;
6679 OPTBLD_INLINE static
6680 TCA iopWrapper(Op op, void(*fn)(intva_t,SetOpOp,MemberKey), PC& pc) {
6681 auto n = decode_intva(pc);
6682 auto subop = decode_oa<SetOpOp>(pc);
6683 auto mk = decode_member_key(pc, liveUnit());
6684 fn(n, subop, mk);
6685 return nullptr;
6688 OPTBLD_INLINE static
6689 TCA iopWrapper(Op op, void(*fn)(local_var,RepoAuthType), PC& pc) {
6690 auto var = decode_local(pc);
6691 if (debug) {
6692 auto rat = decodeRAT(vmfp()->m_func->unit(), pc);
6693 fn(var, rat);
6694 } else {
6695 RepoAuthType rat; pc += encodedRATSize(pc);
6696 fn(var, rat);
6698 return nullptr;
6701 OPTBLD_INLINE static
6702 TCA iopWrapper(Op op, void(*fn)(intva_t,RepoAuthType), PC& pc) {
6703 auto n = decode_intva(pc);
6704 if (debug) {
6705 auto rat = decodeRAT(vmfp()->m_func->unit(), pc);
6706 fn(n, rat);
6707 } else {
6708 RepoAuthType rat; pc += encodedRATSize(pc);
6709 fn(n, rat);
6711 return nullptr;
6714 OPTBLD_INLINE static
6715 TCA iopWrapper(Op op, void(*fn)(intva_t,int), PC& pc) {
6716 auto n = decode_intva(pc);
6717 auto sa = decode<int>(pc);
6718 fn(n, sa);
6719 return nullptr;
6722 OPTBLD_INLINE static
6723 TCA iopWrapper(Op op, void(*fn)(intva_t,intva_t,int), PC& pc) {
6724 auto n1 = decode_intva(pc);
6725 auto n2 = decode_intva(pc);
6726 auto sa = decode<int>(pc);
6727 fn(n1, n2, sa);
6728 return nullptr;
6731 OPTBLD_INLINE static
6732 TCA iopWrapper(Op op, void(*fn)(intva_t,int,int), PC& pc) {
6733 auto n = decode_intva(pc);
6734 auto sa1 = decode<int>(pc);
6735 auto sa2 = decode<int>(pc);
6736 fn(n, sa1, sa2);
6737 return nullptr;
6740 OPTBLD_INLINE static
6741 TCA iopWrapper(Op op, void(*fn)(intva_t,ObjMethodOp), PC& pc) {
6742 auto n = decode_intva(pc);
6743 auto subop = decode_oa<ObjMethodOp>(pc);
6744 fn(n, subop);
6745 return nullptr;
6748 OPTBLD_INLINE static TCA
6749 iopWrapper(Op op, void(*fn)(intva_t,const StringData*,ObjMethodOp), PC& pc) {
6750 auto n = decode_intva(pc);
6751 auto s = decode_litstr(pc);
6752 auto subop = decode_oa<ObjMethodOp>(pc);
6753 fn(n, s, subop);
6754 return nullptr;
6757 OPTBLD_INLINE static
6758 TCA iopWrapper(Op op,
6759 void(*fn)(PC&,intva_t,const StringData*,const StringData*), PC& pc) {
6760 auto n = decode_intva(pc);
6761 auto s1 = decode_litstr(pc);
6762 auto s2 = decode_litstr(pc);
6763 fn(pc, n, s1, s2);
6764 return nullptr;
6767 OPTBLD_INLINE static
6768 TCA iopWrapper(Op op, void(*fn)(local_var,const StringData*), PC& pc) {
6769 auto loc = decode_local(pc);
6770 auto s = decode_litstr(pc);
6771 fn(loc, s);
6772 return nullptr;
6775 OPTBLD_INLINE static
6776 TCA iopWrapper(Op op, void(*fn)(const StringData*,InitPropOp), PC& pc) {
6777 auto s = decode_litstr(pc);
6778 auto subop = decode_oa<InitPropOp>(pc);
6779 fn(s, subop);
6780 return nullptr;
6783 OPTBLD_INLINE static
6784 TCA iopWrapper(Op op,
6785 void(*fn)(PC,PC&,SwitchKind,int64_t,int,imm_array<Offset>),
6786 PC& pc) {
6787 auto origpc = pc - encoded_op_size(op);
6788 auto kind = decode_oa<SwitchKind>(pc);
6789 auto base = decode<int64_t>(pc);
6790 auto n = decode<int32_t>(pc);
6791 auto v = imm_array<Offset>(pc);
6792 pc += n * sizeof(Offset);
6793 fn(origpc, pc, kind, base, n, v);
6794 return nullptr;
6797 OPTBLD_INLINE static
6798 TCA iopWrapper(Op op,
6799 void(*fn)(PC,PC&,int,imm_array<StrVecItem>), PC& pc) {
6800 auto origpc = pc - encoded_op_size(op);
6801 auto n = decode<int32_t>(pc);
6802 auto v = imm_array<StrVecItem>(pc);
6803 pc += n * sizeof(StrVecItem);
6804 fn(origpc, pc, n, v);
6805 return nullptr;
6809 * The interpOne functions are fat wrappers around the iop* functions, mostly
6810 * adding a bunch of debug-only logging and stats tracking.
6812 #define O(opcode, imm, push, pop, flags) \
6813 TCA interpOne##opcode(ActRec* fp, TypedValue* sp, Offset pcOff) { \
6814 interp_set_regs(fp, sp, pcOff); \
6815 SKTRACE(5, liveSK(), \
6816 "%40s %p %p\n", \
6817 "interpOne" #opcode " before (fp,sp)", vmfp(), vmsp()); \
6818 if (Stats::enableInstrCount()) { \
6819 Stats::inc(Stats::Instr_Transl##opcode, -1); \
6820 Stats::inc(Stats::Instr_InterpOne##opcode); \
6822 if (Trace::moduleEnabled(Trace::interpOne, 1)) { \
6823 static const StringData* cat = makeStaticString("interpOne"); \
6824 static const StringData* name = makeStaticString(#opcode); \
6825 Stats::incStatGrouped(cat, name, 1); \
6827 if (Trace::moduleEnabled(Trace::ringbuffer)) { \
6828 auto sk = liveSK().toAtomicInt(); \
6829 Trace::ringbufferEntry(Trace::RBTypeInterpOne, sk, 0); \
6831 INC_TPC(interp_one) \
6832 /* Correct for over-counting in TC-stats. */ \
6833 Stats::inc(Stats::Instr_TC, -1); \
6834 condStackTraceSep(Op##opcode); \
6835 COND_STACKTRACE("op"#opcode" pre: "); \
6836 PC pc = vmpc(); \
6837 ONTRACE(1, auto offset = vmfp()->m_func->unit()->offsetOf(pc); \
6838 Trace::trace("op"#opcode" offset: %d\n", offset)); \
6839 assert(peek_op(pc) == Op::opcode); \
6840 pc += encoded_op_size(Op::opcode); \
6841 auto const retAddr = iopWrapper(Op::opcode, iop##opcode, pc); \
6842 vmpc() = pc; \
6843 COND_STACKTRACE("op"#opcode" post: "); \
6844 condStackTraceSep(Op##opcode); \
6846 * Only set regstate back to dirty if an exception is not
6847 * propagating. If an exception is throwing, regstate for this call
6848 * is actually still correct, and we don't have information in the
6849 * fixup map for interpOne calls anyway.
6850 */ \
6851 tl_regState = VMRegState::DIRTY; \
6852 return retAddr; \
6854 OPCODES
6855 #undef O
6857 InterpOneFunc interpOneEntryPoints[] = {
6858 #define O(opcode, imm, push, pop, flags) &interpOne##opcode,
6859 OPCODES
6860 #undef O
6863 template <bool breakOnCtlFlow>
6864 TCA dispatchImpl() {
6865 // Unfortunately, MSVC doesn't support computed
6866 // gotos, so use a switch instead.
6867 bool collectCoverage = RID().getCoverage();
6869 #ifndef _MSC_VER
6870 static const void *optabDirect[] = {
6871 #define O(name, imm, push, pop, flags) \
6872 &&Label##name,
6873 OPCODES
6874 #undef O
6876 static const void *optabDbg[] = {
6877 #define O(name, imm, push, pop, flags) \
6878 &&LabelDbg##name,
6879 OPCODES
6880 #undef O
6882 static const void *optabCover[] = {
6883 #define O(name, imm, push, pop, flags) \
6884 &&LabelCover##name,
6885 OPCODES
6886 #undef O
6888 assert(sizeof(optabDirect) / sizeof(const void *) == Op_count);
6889 assert(sizeof(optabDbg) / sizeof(const void *) == Op_count);
6890 const void **optab = optabDirect;
6891 if (collectCoverage) {
6892 optab = optabCover;
6894 DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
6895 #endif
6897 bool isCtlFlow = false;
6898 TCA retAddr = nullptr;
6899 Op op;
6901 #ifdef _MSC_VER
6902 # define DISPATCH_ACTUAL() goto DispatchSwitch
6903 #else
6904 # define DISPATCH_ACTUAL() goto *optab[size_t(op)]
6905 #endif
6907 #define DISPATCH() do { \
6908 if (breakOnCtlFlow && isCtlFlow) { \
6909 ONTRACE(1, \
6910 Trace::trace("dispatch: Halt dispatch(%p)\n", \
6911 vmfp())); \
6912 return retAddr; \
6914 opPC = pc; \
6915 op = decode_op(pc); \
6916 COND_STACKTRACE("dispatch: "); \
6917 FTRACE(1, "dispatch: {}: {}\n", pcOff(), \
6918 instrToString(opPC, vmfp()->m_func->unit())); \
6919 DISPATCH_ACTUAL(); \
6920 } while (0)
6922 ONTRACE(1, Trace::trace("dispatch: Enter dispatch(%p)\n",
6923 vmfp()));
6924 PC pc = vmpc();
6925 PC opPC;
6926 DISPATCH();
6928 #define OPCODE_DBG_BODY(name, imm, push, pop, flags) \
6929 phpDebuggerOpcodeHook(opPC)
6930 #define OPCODE_COVER_BODY(name, imm, push, pop, flags) \
6931 if (collectCoverage) { \
6932 recordCodeCoverage(opPC); \
6934 #define OPCODE_MAIN_BODY(name, imm, push, pop, flags) \
6936 if (breakOnCtlFlow && Stats::enableInstrCount()) { \
6937 Stats::inc(Stats::Instr_InterpBB##name); \
6939 retAddr = iopWrapper(Op::name, iop##name, pc); \
6940 vmpc() = pc; \
6941 if (breakOnCtlFlow) { \
6942 isCtlFlow = instrIsControlFlow(Op::name); \
6944 if (instrCanHalt(Op::name) && UNLIKELY(!pc)) { \
6945 vmfp() = nullptr; \
6946 /* We returned from the top VM frame in this nesting level. This means
6947 * m_savedRip in our ActRec must have been callToExit, which should've
6948 * been returned by jitReturnPost(), whether or not we were called from
6949 * the TC. We only actually return callToExit to our caller if that
6950 * caller is dispatchBB(). */ \
6951 assert(retAddr == jit::tc::ustubs().callToExit); \
6952 return breakOnCtlFlow ? retAddr : nullptr; \
6954 assert(isCtlFlow || !retAddr); \
6955 DISPATCH(); \
6958 #ifdef _MSC_VER
6959 DispatchSwitch:
6960 switch (uint8_t(op)) {
6961 #define O(name, imm, push, pop, flags) \
6962 case Op::name: { \
6963 DEBUGGER_ATTACHED_ONLY(OPCODE_DBG_BODY(name, imm, push, pop, flags)); \
6964 OPCODE_COVER_BODY(name, imm, push, pop, flags) \
6965 OPCODE_MAIN_BODY(name, imm, push, pop, flags) \
6967 #else
6968 #define O(name, imm, push, pop, flags) \
6969 LabelDbg##name: \
6970 OPCODE_DBG_BODY(name, imm, push, pop, flags); \
6971 LabelCover##name: \
6972 OPCODE_COVER_BODY(name, imm, push, pop, flags) \
6973 Label##name: \
6974 OPCODE_MAIN_BODY(name, imm, push, pop, flags)
6975 #endif
6977 OPCODES
6979 #ifdef _MSC_VER
6981 #endif
6982 #undef O
6983 #undef DISPATCH
6984 #undef DISPATCH_ACTUAL
6985 #undef OPCODE_DBG_BODY
6986 #undef OPCODE_COVER_BODY
6987 #undef OPCODE_MAIN_BODY
6989 assert(retAddr == nullptr);
6990 return nullptr;
6993 static void dispatch() {
6994 DEBUG_ONLY auto const retAddr = dispatchImpl<false>();
6995 assert(retAddr == nullptr);
6998 // We are about to go back to translated code, check whether we should
6999 // stick with the interpreter. NB: if we've just executed a return
7000 // from pseudomain, then there's no PC and no more code to interpret.
7001 OPTBLD_INLINE TCA switchModeForDebugger(TCA retAddr) {
7002 if (DEBUGGER_FORCE_INTR && (vmpc() != 0)) {
7003 if (retAddr) {
7004 // We just interpreted a bytecode that decided we need to return to an
7005 // address in the TC rather than interpreting up into our caller. This
7006 // means it might not be safe to throw an exception right now (see
7007 // discussion in jitReturnPost). So, resume execution in the TC at a stub
7008 // that will throw the execution from a safe place.
7009 FTRACE(1, "Want to throw VMSwitchMode but retAddr = {}, "
7010 "overriding with throwSwitchMode stub.\n", retAddr);
7011 return jit::tc::ustubs().throwSwitchMode;
7012 } else {
7013 throw VMSwitchMode();
7017 return retAddr;
7020 TCA dispatchBB() {
7021 auto sk = [] {
7022 return SrcKey(vmfp()->func(), vmpc(), vmfp()->resumed(),
7023 vmfp()->func()->cls() && vmfp()->hasThis());
7026 if (Trace::moduleEnabled(Trace::dispatchBB)) {
7027 auto cat = makeStaticString("dispatchBB");
7028 auto name = makeStaticString(show(sk()));
7029 Stats::incStatGrouped(cat, name, 1);
7031 if (Trace::moduleEnabled(Trace::ringbuffer)) {
7032 Trace::ringbufferEntry(Trace::RBTypeDispatchBB, sk().toAtomicInt(), 0);
7034 auto retAddr = dispatchImpl<true>();
7035 return switchModeForDebugger(retAddr);