Fix ResolveClsMethod bug
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blobfbb0a893f9fcb8d01c5099704efd90eb70d919fc
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
18 #include "hphp/runtime/vm/bytecode.h"
20 #include <algorithm>
21 #include <string>
22 #include <vector>
23 #include <sstream>
24 #include <iostream>
25 #include <iomanip>
26 #include <cinttypes>
28 #include <boost/filesystem.hpp>
30 #include <folly/String.h>
31 #include <folly/portability/SysMman.h>
33 #include "hphp/util/debug.h"
34 #include "hphp/util/numa.h"
35 #include "hphp/util/portability.h"
36 #include "hphp/util/ringbuffer.h"
37 #include "hphp/util/text-util.h"
38 #include "hphp/util/trace.h"
40 #include "hphp/system/systemlib.h"
42 #include "hphp/runtime/base/apc-stats.h"
43 #include "hphp/runtime/base/apc-typed-value.h"
44 #include "hphp/runtime/base/array-init.h"
45 #include "hphp/runtime/base/code-coverage.h"
46 #include "hphp/runtime/base/collections.h"
47 #include "hphp/runtime/base/container-functions.h"
48 #include "hphp/runtime/base/enum-util.h"
49 #include "hphp/runtime/base/execution-context.h"
50 #include "hphp/runtime/base/externals.h"
51 #include "hphp/runtime/base/hhprof.h"
52 #include "hphp/runtime/base/memory-manager.h"
53 #include "hphp/runtime/base/mixed-array.h"
54 #include "hphp/runtime/base/object-data.h"
55 #include "hphp/runtime/base/set-array.h"
56 #include "hphp/runtime/base/program-functions.h"
57 #include "hphp/runtime/base/rds.h"
58 #include "hphp/runtime/base/repo-auth-type-codec.h"
59 #include "hphp/runtime/base/runtime-error.h"
60 #include "hphp/runtime/base/runtime-option.h"
61 #include "hphp/runtime/base/stat-cache.h"
62 #include "hphp/runtime/base/stats.h"
63 #include "hphp/runtime/base/strings.h"
64 #include "hphp/runtime/base/type-structure.h"
65 #include "hphp/runtime/base/type-structure-helpers.h"
66 #include "hphp/runtime/base/type-structure-helpers-defs.h"
67 #include "hphp/runtime/base/tv-arith.h"
68 #include "hphp/runtime/base/tv-comparisons.h"
69 #include "hphp/runtime/base/tv-conversions.h"
70 #include "hphp/runtime/base/tv-refcount.h"
71 #include "hphp/runtime/base/tv-type.h"
72 #include "hphp/runtime/base/unit-cache.h"
74 #include "hphp/runtime/ext/array/ext_array.h"
75 #include "hphp/runtime/ext/asio/ext_await-all-wait-handle.h"
76 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
77 #include "hphp/runtime/ext/asio/ext_async-generator-wait-handle.h"
78 #include "hphp/runtime/ext/asio/ext_async-generator.h"
79 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
80 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
81 #include "hphp/runtime/ext/asio/ext_waitable-wait-handle.h"
82 #include "hphp/runtime/ext/std/ext_std_closure.h"
83 #include "hphp/runtime/ext/extension.h"
84 #include "hphp/runtime/ext/generator/ext_generator.h"
85 #include "hphp/runtime/ext/hh/ext_hh.h"
86 #include "hphp/runtime/ext/reflection/ext_reflection.h"
87 #include "hphp/runtime/ext/std/ext_std_variable.h"
88 #include "hphp/runtime/ext/string/ext_string.h"
89 #include "hphp/runtime/ext/hash/hash_murmur.h"
90 #include "hphp/runtime/ext/json/JSON_parser.h"
92 #include "hphp/runtime/server/rpc-request-handler.h"
93 #include "hphp/runtime/server/source-root-info.h"
95 #include "hphp/runtime/vm/act-rec-defs.h"
96 #include "hphp/runtime/vm/act-rec.h"
97 #include "hphp/runtime/vm/class.h"
98 #include "hphp/runtime/vm/class-meth-data-ref.h"
99 #include "hphp/runtime/vm/debug/debug.h"
100 #include "hphp/runtime/vm/debugger-hook.h"
101 #include "hphp/runtime/vm/event-hook.h"
102 #include "hphp/runtime/vm/globals-array.h"
103 #include "hphp/runtime/vm/hh-utils.h"
104 #include "hphp/runtime/vm/hhbc-codec.h"
105 #include "hphp/runtime/vm/hhbc.h"
106 #include "hphp/runtime/vm/interp-helpers.h"
107 #include "hphp/runtime/vm/member-operations.h"
108 #include "hphp/runtime/vm/memo-cache.h"
109 #include "hphp/runtime/vm/method-lookup.h"
110 #include "hphp/runtime/vm/native.h"
111 #include "hphp/runtime/vm/php-debug.h"
112 #include "hphp/runtime/vm/reified-generics.h"
113 #include "hphp/runtime/vm/repo-global-data.h"
114 #include "hphp/runtime/vm/repo.h"
115 #include "hphp/runtime/vm/resumable.h"
116 #include "hphp/runtime/vm/runtime.h"
117 #include "hphp/runtime/vm/srckey.h"
118 #include "hphp/runtime/vm/type-constraint.h"
119 #include "hphp/runtime/vm/type-profile.h"
120 #include "hphp/runtime/vm/unwind.h"
121 #include "hphp/runtime/vm/workload-stats.h"
123 #include "hphp/runtime/vm/jit/code-cache.h"
124 #include "hphp/runtime/vm/jit/debugger.h"
125 #include "hphp/runtime/vm/jit/enter-tc.h"
126 #include "hphp/runtime/vm/jit/perf-counters.h"
127 #include "hphp/runtime/vm/jit/tc.h"
128 #include "hphp/runtime/vm/jit/translator-inline.h"
129 #include "hphp/runtime/vm/jit/translator-runtime.h"
130 #include "hphp/runtime/vm/jit/translator.h"
131 #include "hphp/runtime/vm/jit/unwind-itanium.h"
134 namespace HPHP {
// File-scope setup for the bytecode interpreter: trace module registration,
// the RepoAuthoritative flag, and inlining macros tuned per compiler/mode.
136 TRACE_SET_MOD(bcinterp);
138 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
139 // to be closer to other bytecode.cpp data.
140 bool RuntimeOption::RepoAuthoritative = false;
142 using jit::TCA;
144 // GCC 4.8 has some real problems with all the inlining in this file, so don't
145 // go overboard with that version.
146 #if !defined(NDEBUG) || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 8))
147 #define OPTBLD_INLINE
148 #define OPTBLD_FLT_INLINE
149 #else
150 #define OPTBLD_INLINE ALWAYS_INLINE
151 #define OPTBLD_FLT_INLINE INLINE_FLATTEN
152 #endif
154 template <>
155 Class* arGetContextClassImpl<false>(const ActRec* ar) {
156 if (ar == nullptr) {
157 return nullptr;
159 return ar->m_func->cls();
// Skipping variant of context-class lookup: pseudomains and builtins inherit
// their caller's context, so walk back through prior VM frames until a
// regular PHP function is found (or the frame chain runs out).
162 template <>
163 Class* arGetContextClassImpl<true>(const ActRec* ar) {
164 if (ar == nullptr) {
165 return nullptr;
167 if (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin()) {
168 // Pseudomains inherit the context of their caller
169 auto const context = g_context.getNoCheck();
170 ar = context->getPrevVMState(ar);
// Keep skipping: the caller may itself be a pseudomain/builtin.
171 while (ar != nullptr &&
172 (ar->m_func->isPseudoMain() || ar->m_func->isBuiltin())) {
173 ar = context->getPrevVMState(ar);
175 if (ar == nullptr) {
176 return nullptr;
179 return ar->m_func->cls();
182 void frame_free_locals_no_hook(ActRec* fp) {
183 frame_free_locals_inl_no_hook(fp, fp->func()->numLocals());
// Interned strings used by the interpreter for magic-method dispatch and
// error-location array keys.
186 const StaticString s___call("__call");
187 const StaticString s___callStatic("__callStatic");
188 const StaticString s_file("file");
189 const StaticString s_line("line");
190 const StaticString s_construct("__construct");
192 ///////////////////////////////////////////////////////////////////////////////
194 //=============================================================================
195 // Miscellaneous decoders.
// prettytype(): human-readable names for immediate-operand types; used only
// by the decode tracing in peek() below.
197 inline const char* prettytype(int) { return "int"; }
198 inline const char* prettytype(long) { return "long"; }
199 inline const char* prettytype(long long) { return "long long"; }
200 inline const char* prettytype(double) { return "double"; }
201 inline const char* prettytype(unsigned) { return "unsigned"; }
202 inline const char* prettytype(OODeclExistsOp) { return "OpDeclExistsOp"; }
203 inline const char* prettytype(FatalOp) { return "FatalOp"; }
204 inline const char* prettytype(IsTypeOp) { return "IsTypeOp"; }
205 inline const char* prettytype(SetOpOp) { return "SetOpOp"; }
206 inline const char* prettytype(IncDecOp) { return "IncDecOp"; }
207 inline const char* prettytype(ObjMethodOp) { return "ObjMethodOp"; }
208 inline const char* prettytype(BareThisOp) { return "BareThisOp"; }
209 inline const char* prettytype(InitPropOp) { return "InitPropOp"; }
210 inline const char* prettytype(SilenceOp) { return "SilenceOp"; }
211 inline const char* prettytype(SwitchKind) { return "SwitchKind"; }
212 inline const char* prettytype(MOpMode) { return "MOpMode"; }
213 inline const char* prettytype(QueryMOp) { return "QueryMOp"; }
214 inline const char* prettytype(SetRangeOp) { return "SetRangeOp"; }
215 inline const char* prettytype(TypeStructResolveOp) {
216 return "TypeStructResolveOp";
218 inline const char* prettytype(HasGenericsOp) { return "HasGenericsOp"; }
219 inline const char* prettytype(CudOp) { return "CudOp"; }
220 inline const char* prettytype(ContCheckOp) { return "ContCheckOp"; }
221 inline const char* prettytype(SpecialClsRef) { return "SpecialClsRef"; }
222 inline const char* prettytype(CollectionType) { return "CollectionType"; }
224 // load a T value from *pc without incrementing
225 template<class T> T peek(PC pc) {
226 T v;
// memcpy handles bytecode streams that are not aligned for T.
227 std::memcpy(&v, pc, sizeof v);
228 TRACE(2, "decode: Immediate %s %" PRIi64"\n", prettytype(v), int64_t(v));
229 return v;
// Read a T at *pc and advance pc past it.
232 template<class T> T decode(PC& pc) {
233 auto v = peek<T>(pc);
234 pc += sizeof(T);
235 return v;
// Decode a literal-string id and resolve it against the live unit.
238 inline const StringData* decode_litstr(PC& pc) {
239 auto id = decode<Id>(pc);
240 return liveUnit()->lookupLitstrId(id);
// Decode a literal-array id and resolve it against the live unit.
243 inline const ArrayData* decode_litarr(PC& pc) {
244 return liveUnit()->lookupArrayId(decode<Id>(pc));
247 namespace {
249 // wrapper for local variable LA operand
250 struct local_var {
251 TypedValue* ptr;
252 int32_t index;
253 TypedValue* operator->() const { return ptr; }
254 TypedValue& operator*() const { return *ptr; }
257 // wrapper for class-ref slot CA(R|W) operand
258 struct clsref_slot {
259 cls_ref* ptr;
260 uint32_t index;
// Consume the slot's contents; in debug builds the slot is validated and
// then poisoned so stale reads are caught.
262 std::pair<ArrayData*, Class*> take() const {
263 auto cls = ptr->cls;
264 auto ret = std::make_pair(ptr->reified_types, cls.get());
265 if (debug) {
266 cls->validate();
267 memset(ptr, kTrashClsRef, sizeof(*ptr));
269 return ret;
// Store a class (plus reified generics) into the slot.
272 void put(ArrayData* reified_types, Class* cls) {
273 *ptr = cls_ref{reified_types, cls};
277 // wrapper to handle unaligned access to variadic immediates
278 template<class T> struct imm_array {
279 uint32_t const size;
280 PC const ptr;
282 explicit imm_array(uint32_t size, PC pc)
283 : size{size}
284 , ptr{pc}
// memcpy because the immediate stream has no alignment guarantee for T.
287 T operator[](uint32_t i) const {
288 T e;
289 memcpy(&e, ptr + i * sizeof(T), sizeof(T));
290 return e;
// Decode a local-variable id (LA operand) and resolve it in the current frame.
296 ALWAYS_INLINE local_var decode_local(PC& pc) {
297 auto la = decode_iva(pc);
298 assertx(la < vmfp()->m_func->numLocals());
299 return local_var{frame_local(vmfp(), la), safe_cast<int32_t>(la)};
// Decode an iterator id (IA operand) and resolve it in the current frame.
302 ALWAYS_INLINE Iter* decode_iter(PC& pc) {
303 auto ia = decode_iva(pc);
304 return frame_iter(vmfp(), ia);
// Decode a class-ref slot id (CA operand) and resolve it in the current frame.
307 ALWAYS_INLINE clsref_slot decode_clsref_slot(PC& pc) {
308 uint32_t ca = decode_iva(pc);
309 assertx(ca < vmfp()->m_func->numClsRefSlots());
310 return clsref_slot{frame_clsref_slot(vmfp(), ca), ca};
// Decode a variadic immediate: length-prefixed array of T, advancing pc past
// the whole payload.
313 template<typename T>
314 OPTBLD_INLINE imm_array<T> decode_imm_array(PC& pc) {
315 auto const size = decode_iva(pc);
316 auto const arr_pc = pc;
317 pc += size * sizeof(T);
318 return imm_array<T>{size, arr_pc};
321 OPTBLD_INLINE IterTable decode_iter_table(PC& pc) {
322 return iterTableFromStream(pc);
// Decode a repo-auth-type; only debug builds materialize it, release builds
// just skip the encoded bytes.
325 OPTBLD_INLINE RepoAuthType decode_rat(PC& pc) {
326 if (debug) return decodeRAT(liveUnit(), pc);
328 pc += encodedRATSize(pc);
329 return RepoAuthType{};
332 //=============================================================================
333 // Miscellaneous helpers.
335 static inline Class* frameStaticClass(ActRec* fp) {
336 if (!fp->func()->cls()) return nullptr;
337 if (fp->hasThis()) {
338 return fp->getThis()->getVMClass();
340 return fp->getClass();
343 //=============================================================================
344 // VarEnv.
346 const StaticString s_GLOBALS("GLOBALS");
// Create the single request-global VarEnv; asserts one does not exist yet.
348 void VarEnv::createGlobal() {
349 assertx(!g_context->m_globalVarEnv);
350 g_context->m_globalVarEnv = req::make_raw<VarEnv>();
// Global-scope constructor: builds the name/value table and publishes the
// $GLOBALS array (a GlobalsArray view over m_nvTable) into it.
353 VarEnv::VarEnv()
354 : m_nvTable()
355 , m_extraArgs(nullptr)
356 , m_depth(0)
357 , m_global(true)
359 TRACE(3, "Creating VarEnv %p [global scope]\n", this);
360 auto globals_var = Variant::attach(
361 new (tl_heap->objMalloc(sizeof(GlobalsArray))) GlobalsArray(&m_nvTable)
363 m_nvTable.set(s_GLOBALS.get(), globals_var.asTypedValue());
// Local-scope constructor: lazily attach a VarEnv to a live frame, taking
// ownership of its ExtraArgs. Only valid for AttrMayUseVV functions.
366 VarEnv::VarEnv(ActRec* fp, ExtraArgs* eArgs)
367 : m_nvTable(fp)
368 , m_extraArgs(eArgs)
369 , m_depth(1)
370 , m_global(false)
372 assertx(fp->func()->attrs() & AttrMayUseVV);
373 TRACE(3, "Creating lazily attached VarEnv %p on stack\n", this);
// Clone constructor: copy another (depth-1, non-global) VarEnv onto a new
// frame, cloning its ExtraArgs if present.
376 VarEnv::VarEnv(const VarEnv* varEnv, ActRec* fp)
377 : m_nvTable(varEnv->m_nvTable, fp)
378 , m_extraArgs(varEnv->m_extraArgs ? varEnv->m_extraArgs->clone(fp) : nullptr)
379 , m_depth(1)
380 , m_global(false)
382 assertx(varEnv->m_depth == 1);
383 assertx(!varEnv->m_global);
384 assertx(fp->func()->attrs() & AttrMayUseVV);
386 TRACE(3, "Cloning VarEnv %p to %p\n", varEnv, this);
389 VarEnv::~VarEnv() {
390 TRACE(3, "Destroying VarEnv %p [%s]\n",
391 this,
392 isGlobalScope() ? "global scope" : "local scope");
393 assertx(isGlobalScope() == (g_context->m_globalVarEnv == this));
395 if (isGlobalScope()) {
397 * When detaching the global scope, we leak any live objects (and
398 * let MemoryManager clean them up). This is because we're
399 * not supposed to run destructors for objects that are live at
400 * the end of a request.
402 m_nvTable.unset(s_GLOBALS.get());
403 m_nvTable.leak();
405 // at this point, m_nvTable is destructed, and GlobalsArray
406 // has a dangling pointer to it.
// Detach (and possibly destroy) the frame's VarEnv via exitFP.
409 void VarEnv::deallocate(ActRec* fp) {
410 fp->m_varEnv->exitFP(fp);
// Create a depth-1 VarEnv for a frame, adopting its ExtraArgs.
413 VarEnv* VarEnv::createLocal(ActRec* fp) {
414 return req::make_raw<VarEnv>(fp, fp->getExtraArgs());
// Clone this VarEnv onto a new frame (see the clone constructor).
417 VarEnv* VarEnv::clone(ActRec* fp) const {
418 return req::make_raw<VarEnv>(this, fp);
// Move named locals from oldFP into newFP when a frame suspends (resumable).
421 void VarEnv::suspend(const ActRec* oldFP, ActRec* newFP) {
422 m_nvTable.suspend(oldFP, newFP);
// Attach this VarEnv to a newly entered frame. For the global VarEnv the
// first attach has no previous frame; otherwise the table is moved from the
// previous frame (which must be the immediate VV-capable predecessor) to the
// new one.
425 void VarEnv::enterFP(ActRec* oldFP, ActRec* newFP) {
426 TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
427 this,
428 isGlobalScope() ? "global scope" : "local scope",
429 int(newFP->m_func->numNamedLocals()), newFP);
430 assertx(newFP);
431 if (oldFP == nullptr) {
432 assertx(isGlobalScope() && m_depth == 0);
433 } else {
434 assertx(m_depth >= 1);
435 assertx(g_context->getPrevVMStateSkipFrame(newFP) == oldFP);
// Debug-only: verify no intervening frame could have owned a VarEnv.
436 if (debug) {
437 auto prev = newFP;
438 while (true) {
439 prev = g_context->getPrevVMState(prev);
440 if (prev == oldFP) break;
441 assertx(!(prev->m_func->attrs() & AttrMayUseVV) || !prev->hasVarEnv());
444 m_nvTable.detach(oldFP);
447 assertx(newFP->func()->attrs() & AttrMayUseVV);
448 m_nvTable.attach(newFP);
449 m_depth++;
// Detach this VarEnv from an exiting frame. At depth 0 the VarEnv (and any
// ExtraArgs) is destroyed unless it is the global scope; otherwise the table
// is re-attached to the nearest previous frame that shares this VarEnv.
452 void VarEnv::exitFP(ActRec* fp) {
453 TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
454 this,
455 isGlobalScope() ? "global scope" : "local scope",
456 fp);
457 assertx(fp);
458 assertx(m_depth > 0);
460 m_depth--;
461 m_nvTable.detach(fp);
463 if (m_depth == 0) {
464 if (m_extraArgs) {
465 assertx(!isGlobalScope());
466 const auto numExtra = fp->numArgs() - fp->m_func->numNonVariadicParams();
467 ExtraArgs::deallocate(m_extraArgs, numExtra);
470 // don't free global VarEnv
471 if (!isGlobalScope()) {
472 req::destroy_raw(this);
474 } else {
// Walk back through the frame chain to find the previous frame using this
// VarEnv and re-attach the table there.
475 while (true) {
476 auto const prevFP = g_context->getPrevVMState(fp);
477 if (prevFP->func()->attrs() & AttrMayUseVV &&
478 prevFP->m_varEnv == this) {
479 m_nvTable.attach(prevFP);
480 break;
482 fp = prevFP;
// Set a named variable by value.
487 void VarEnv::set(const StringData* name, tv_rval tv) {
488 m_nvTable.set(name, tv);
// Bind a named variable by reference.
491 void VarEnv::bind(const StringData* name, tv_lval tv) {
492 m_nvTable.bind(name, tv);
// Preserve ref-ness: bind if the value is a Ref, otherwise set by value.
495 void VarEnv::setWithRef(const StringData* name, TypedValue* tv) {
496 if (isRefType(tv->m_type)) {
497 bind(name, tv);
498 } else {
499 set(name, tv);
// Look up a named variable; nullptr if absent.
503 TypedValue* VarEnv::lookup(const StringData* name) {
504 return m_nvTable.lookup(name);
// Look up a named variable, creating it if absent.
507 TypedValue* VarEnv::lookupAdd(const StringData* name) {
508 return m_nvTable.lookupAdd(name);
// Unset a named variable; always reports success.
511 bool VarEnv::unset(const StringData* name) {
512 m_nvTable.unset(name);
513 return true;
516 const StaticString s_closure_var("0Closure");
517 const StaticString s_reified_generics_var("0ReifiedGenerics");
// Snapshot all user-visible variables in this VarEnv as a ksorted array,
// skipping the internal 0Closure/0ReifiedGenerics entries and preserving
// ref-ness of referenced values.
519 Array VarEnv::getDefinedVariables() const {
520 Array ret = Array::Create();
522 NameValueTable::Iterator iter(&m_nvTable);
523 for (; iter.valid(); iter.next()) {
524 auto const sd = iter.curKey();
525 auto const tv = iter.curVal();
526 // Closures have an internal 0Closure variable
527 // Reified functions have an internal 0ReifiedGenerics variable
528 if (s_closure_var.equal(sd) || s_reified_generics_var.equal(sd)) {
529 continue;
531 if (tvAsCVarRef(tv).isReferenced()) {
532 ret.setWithRef(StrNR(sd).asString(), tvAsCVarRef(tv));
533 } else {
534 ret.set(StrNR(sd).asString(), tvAsCVarRef(tv));
538 // Make result independent of the hashtable implementation.
539 ArrayData* sorted = ret->escalateForSort(SORTFUNC_KSORT);
540 assertx(sorted == ret.get() ||
541 sorted->empty() ||
542 sorted->hasExactlyOneRef());
// Adopt the escalated array (if any) after sorting completes.
543 SCOPE_EXIT {
544 if (sorted != ret.get()) {
545 ret = Array::attach(sorted);
548 sorted->ksort(0, true);
550 return ret;
// Forward to the owned ExtraArgs; caller must know argInd is in range.
553 TypedValue* VarEnv::getExtraArg(unsigned argInd) const {
554 return m_extraArgs->getExtraArg(argInd);
557 //=============================================================================
559 ExtraArgs::ExtraArgs() {}
560 ExtraArgs::~ExtraArgs() {}
// Allocate storage for an ExtraArgs header followed by nargs TypedValues
// (type-scanned as header-with-suffix).
562 void* ExtraArgs::allocMem(unsigned nargs) {
563 assertx(nargs > 0);
564 return req::malloc(
565 sizeof(TypedValue) * nargs + sizeof(ExtraArgs),
566 type_scan::getIndexForMalloc<
567 ExtraArgs,
568 type_scan::Action::WithSuffix<TypedValue>
// Copy nargs stack cells into a freshly allocated ExtraArgs.
573 ExtraArgs* ExtraArgs::allocateCopy(TypedValue* args, unsigned nargs) {
574 void* mem = allocMem(nargs);
575 ExtraArgs* ea = new (mem) ExtraArgs();
578 * The stack grows downward, so the args in memory are "backward"; i.e. the
579 * leftmost (in PHP) extra arg is highest in memory.
581 std::reverse_copy(args, args + nargs, &ea->m_extraArgs[0]);
582 return ea;
585 ExtraArgs* ExtraArgs::allocateUninit(unsigned nargs) {
586 void* mem = ExtraArgs::allocMem(nargs);
587 return new (mem) ExtraArgs();
590 void ExtraArgs::deallocate(ExtraArgs* ea, unsigned nargs) {
591 assertx(nargs > 0);
592 for (unsigned i = 0; i < nargs; ++i) {
593 tvDecRefGen(ea->m_extraArgs + i);
595 deallocateRaw(ea);
598 void ExtraArgs::deallocate(ActRec* ar) {
599 const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
600 deallocate(ar->getExtraArgs(), numExtra);
603 void ExtraArgs::deallocateRaw(ExtraArgs* ea) {
604 ea->~ExtraArgs();
605 req::free(ea);
608 ExtraArgs* ExtraArgs::clone(ActRec* ar) const {
609 const int numExtra = ar->numArgs() - ar->m_func->numParams();
610 auto ret = allocateUninit(numExtra);
611 for (int i = 0; i < numExtra; ++i) {
612 tvDupWithRef(m_extraArgs[i], ret->m_extraArgs[i]);
614 return ret;
// Pointer to the argInd'th stored extra argument (no bounds check).
617 TypedValue* ExtraArgs::getExtraArg(unsigned argInd) const {
618 return const_cast<TypedValue*>(&m_extraArgs[argInd]);
621 //=============================================================================
622 // Stack.
624 // Store actual stack elements array in a thread-local in order to amortize the
625 // cost of allocation.
626 struct StackElms {
627 ~StackElms() { free(m_elms); }
// Lazily allocate the VM stack storage, aligned to its own size so that the
// translator can mask pointers to validate them (see wouldOverflow).
628 TypedValue* elms() {
629 if (m_elms == nullptr) {
630 // RuntimeOption::EvalVMStackElms-sized and -aligned.
631 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
632 if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
633 throw std::runtime_error(
634 std::string("VM stack initialization failed: ") +
635 folly::errnoStr(errno).c_str());
638 madvise(m_elms, algnSz, MADV_DONTNEED);
639 numa_bind_to(m_elms, algnSz, s_numaNode);
641 return m_elms;
// Return the pages to the kernel between requests without freeing the
// virtual mapping.
643 void flush() {
644 if (m_elms != nullptr) {
645 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
646 madvise(m_elms, algnSz, MADV_DONTNEED);
649 private:
650 TypedValue* m_elms{nullptr};
652 THREAD_LOCAL(StackElms, t_se);
654 const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
655 // We reserve the bottom page of each stack for use as the surprise
656 // page, so the minimum useful stack size is the next power of two.
657 const uint32_t Stack::sMinStackElms =
658 2 * sSurprisePageSize / sizeof(TypedValue);
// Sanity-check the configured VM stack size: at least sMinStackElms and a
// power of two (required by the alignment tricks in wouldOverflow).
660 void Stack::ValidateStackSize() {
661 if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
662 throw std::runtime_error(folly::sformat(
663 "VM stack size of {:#x} is below the minimum of {:#x}",
664 RuntimeOption::EvalVMStackElms,
665 sMinStackElms
668 if (!folly::isPowTwo(RuntimeOption::EvalVMStackElms)) {
669 throw std::runtime_error(folly::sformat(
670 "VM stack size of {:#x} is not a power of 2",
671 RuntimeOption::EvalVMStackElms
// Stacks start unallocated; storage is acquired in requestInit().
676 Stack::Stack()
677 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
680 Stack::~Stack() {
681 requestExit();
// Per-request stack setup: grab the thread-local element storage, position
// m_top/m_base, and publish the overflow-check limit into RDS.
684 void Stack::requestInit() {
685 m_elms = t_se->elms();
686 // Burn one element of the stack, to satisfy the constraint that
687 // valid m_top values always have the same high-order (>
688 // log(RuntimeOption::EvalVMStackElms)) bits.
689 m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;
691 rds::header()->stackLimitAndSurprise.store(
692 reinterpret_cast<uintptr_t>(
693 reinterpret_cast<char*>(m_elms) + sSurprisePageSize +
694 kStackCheckPadding * sizeof(Cell)
696 std::memory_order_release
698 assertx(!(rds::header()->stackLimitAndSurprise.load() & kSurpriseFlagMask));
700 // Because of the surprise page at the bottom of the stack we lose an
701 // additional 256 elements which must be taken into account when checking for
702 // overflow.
703 UNUSED size_t maxelms =
704 RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
705 assertx(!wouldOverflow(maxelms - 1));
706 assertx(wouldOverflow(maxelms));
// Per-request teardown: drop the pointer; the storage itself is owned by the
// thread-local StackElms.
709 void Stack::requestExit() {
710 m_elms = nullptr;
// Release per-thread VM memory between requests: RPC handler state, the heap,
// the stack pages, RDS, and parser caches. The heap must end up empty.
713 void flush_evaluation_stack() {
714 if (vmStack().isAllocated()) {
715 // For RPCRequestHandler threads, the ExecutionContext can stay
716 // alive across requests, but its always ok to kill it between
717 // requests, so do so now
718 RPCRequestHandler::cleanupState();
721 tl_heap->flush();
723 if (!t_se.isNull()) {
724 t_se->flush();
726 rds::flush();
727 json_parser_flush_caches();
729 always_assert(tl_heap->empty());
// Debug pretty-printer for one stack/local TypedValue: a tag prefix (V:/C:),
// refcount info where applicable, and a per-type payload rendering.
732 static std::string toStringElm(const TypedValue* tv) {
733 std::ostringstream os;
735 if (!isRealType(tv->m_type)) {
736 os << " ??? type " << static_cast<data_type_t>(tv->m_type) << "\n";
737 return os.str();
739 if (isRefcountedType(tv->m_type) &&
740 !tv->m_data.pcnt->checkCount()) {
741 // OK in the invoking frame when running a destructor.
742 os << " ??? inner_count " << tvGetCount(*tv) << " ";
743 return os.str();
746 auto print_count = [&] {
747 if (tv->m_data.pcnt->isStatic()) {
748 os << ":c(static)";
749 } else if (tv->m_data.pcnt->isUncounted()) {
750 os << ":c(uncounted)";
751 } else {
752 os << ":c(" << tvGetCount(*tv) << ")";
// First switch: emit the V:/C: tag. Refs recurse into the inner cell and
// return immediately.
756 switch (tv->m_type) {
757 case KindOfRef:
758 os << "V:(";
759 os << "@" << tv->m_data.pref;
760 os << toStringElm(tv->m_data.pref->cell());
761 os << ")";
762 return os.str();
763 case KindOfUninit:
764 case KindOfNull:
765 case KindOfBoolean:
766 case KindOfInt64:
767 case KindOfDouble:
768 case KindOfPersistentString:
769 case KindOfString:
770 case KindOfPersistentVec:
771 case KindOfVec:
772 case KindOfPersistentDict:
773 case KindOfDict:
774 case KindOfPersistentKeyset:
775 case KindOfKeyset:
776 case KindOfPersistentShape:
777 case KindOfShape:
778 case KindOfPersistentArray:
779 case KindOfArray:
780 case KindOfObject:
781 case KindOfResource:
782 case KindOfFunc:
783 case KindOfClass:
784 case KindOfClsMeth:
785 case KindOfRecord:
786 os << "C:";
787 break;
// Second switch: emit the payload for each concrete type.
790 do {
791 switch (tv->m_type) {
792 case KindOfUninit:
793 os << "Uninit";
794 continue;
795 case KindOfNull:
796 os << "Null";
797 continue;
798 case KindOfBoolean:
799 os << (tv->m_data.num ? "True" : "False");
800 continue;
801 case KindOfInt64:
802 os << "0x" << std::hex << tv->m_data.num << std::dec;
803 continue;
804 case KindOfDouble:
805 os << tv->m_data.dbl;
806 continue;
807 case KindOfPersistentString:
808 case KindOfString:
// Strings are truncated to 128 bytes and escaped for readability.
810 int len = tv->m_data.pstr->size();
811 bool truncated = false;
812 if (len > 128) {
813 len = 128;
814 truncated = true;
816 os << tv->m_data.pstr;
817 print_count();
818 os << ":\""
819 << escapeStringForCPP(tv->m_data.pstr->data(), len)
820 << "\"" << (truncated ? "..." : "");
822 continue;
823 case KindOfPersistentVec:
824 case KindOfVec:
825 assertx(tv->m_data.parr->isVecArray());
826 assertx(tv->m_data.parr->checkCount());
827 os << tv->m_data.parr;
828 print_count();
829 os << ":Vec";
830 continue;
831 case KindOfPersistentDict:
832 case KindOfDict:
833 assertx(tv->m_data.parr->isDict());
834 assertx(tv->m_data.parr->checkCount());
835 os << tv->m_data.parr;
836 print_count();
837 os << ":Dict";
838 continue;
839 case KindOfPersistentKeyset:
840 case KindOfKeyset:
841 assertx(tv->m_data.parr->isKeyset());
842 assertx(tv->m_data.parr->checkCount());
843 os << tv->m_data.parr;
844 print_count();
845 os << ":Keyset";
846 continue;
847 case KindOfPersistentShape:
848 case KindOfShape:
849 assertx(tv->m_data.parr->isShape());
850 assertx(tv->m_data.parr->checkCount());
851 os << tv->m_data.parr;
852 print_count();
853 os << ":Shape";
854 continue;
855 case KindOfPersistentArray:
856 case KindOfArray:
857 assertx(tv->m_data.parr->isPHPArray());
858 assertx(tv->m_data.parr->checkCount());
859 os << tv->m_data.parr;
860 print_count();
861 os << ":Array";
862 continue;
863 case KindOfObject:
864 assertx(tv->m_data.pobj->checkCount());
865 os << tv->m_data.pobj;
866 print_count();
867 os << ":Object("
868 << tv->m_data.pobj->getClassName().get()->data()
869 << ")";
870 continue;
871 case KindOfRecord:
872 assertx(tv->m_data.prec->checkCount());
873 os << tv->m_data.prec;
874 print_count();
875 os << ":Record("
876 << tv->m_data.prec->getRecord()->name()->data()
877 << ")";
878 continue;
879 case KindOfResource:
880 assertx(tv->m_data.pres->checkCount());
881 os << tv->m_data.pres;
882 print_count();
883 os << ":Resource("
884 << tv->m_data.pres->data()->o_getClassName().get()->data()
885 << ")";
886 continue;
887 case KindOfFunc:
888 os << ":Func("
889 << tv->m_data.pfunc->fullDisplayName()->data()
890 << ")";
891 continue;
892 case KindOfClass:
893 os << ":Class("
894 << tv->m_data.pclass->name()->data()
895 << ")";
896 continue;
897 case KindOfClsMeth:
898 os << ":ClsMeth("
899 << tv->m_data.pclsmeth->getCls()->name()->data()
900 << ", "
901 << tv->m_data.pclsmeth->getFunc()->fullDisplayName()->data()
902 << ")";
903 continue;
// Refs were fully handled by the first switch.
905 case KindOfRef:
906 break;
908 not_reached();
909 } while (0);
911 return os.str();
// Debug pretty-printer for an iterator slot's current type.
914 static std::string toStringIter(const Iter* it) {
915 switch (it->arr().getIterType()) {
916 case ArrayIter::TypeUndefined:
917 return "I:Undefined";
918 case ArrayIter::TypeArray:
919 return "I:Array";
920 case ArrayIter::TypeIterator:
921 return "I:Iterator";
923 assertx(false);
924 return "I:?";
928 * Return true if Offset o is inside the protected region of a fault
929 * funclet for iterId, otherwise false.
931 static bool checkIterScope(const Func* f, Offset o, Id iterId) {
932 assertx(o >= f->base() && o < f->past());
// Scan the exception-handler table for a region covering o with this iter id.
933 for (auto const& eh : f->ehtab()) {
934 if (eh.m_base <= o && o < eh.m_past &&
935 eh.m_iterId == iterId) {
936 return true;
939 return false;
// Debug printer for one activation frame: recurses to print callers first,
// then emits the frame header, locals, iterators, and evaluation-stack cells.
942 static void toStringFrame(std::ostream& os, const ActRec* fp,
943 int offset, const TypedValue* ftop,
944 const std::string& prefix, bool isTop = true) {
945 assertx(fp);
947 // Use depth-first recursion to output the most deeply nested stack frame
948 // first.
950 Offset prevPc = 0;
951 TypedValue* prevStackTop = nullptr;
952 ActRec* prevFp = g_context->getPrevVMState(fp, &prevPc, &prevStackTop);
953 if (prevFp != nullptr) {
954 toStringFrame(os, prevFp, prevPc, prevStackTop, prefix, false);
958 os << prefix;
959 const Func* func = fp->m_func;
960 assertx(func);
961 func->validate();
962 std::string funcName(func->fullName()->data());
963 os << "{func:" << funcName
964 << ",callOff:" << fp->m_callOff
965 << ",this:0x"
966 << std::hex << (func->cls() && fp->hasThis() ? fp->getThis() : nullptr)
967 << std::dec << "}";
// Locals live immediately below the ActRec and grow downward.
968 TypedValue* tv = (TypedValue*)fp;
969 tv--;
971 if (func->numLocals() > 0) {
972 // Don't print locals for parent frames on a Ret(C|V) since some of them
973 // may already be destructed.
974 if (isRet(func->unit()->getOp(offset)) && !isTop) {
975 os << "<locals destroyed>";
976 } else {
977 os << "<";
978 int n = func->numLocals();
979 for (int i = 0; i < n; i++, tv--) {
980 if (i > 0) {
981 os << " ";
983 os << toStringElm(tv);
985 os << ">";
// Iterators follow the locals; only print ones live at this offset.
989 if (func->numIterators() > 0) {
990 os << "|";
991 Iter* it = &((Iter*)&tv[1])[-1];
992 for (int i = 0; i < func->numIterators(); i++, it--) {
993 if (i > 0) {
994 os << " ";
996 if (checkIterScope(func, offset, i)) {
997 os << toStringIter(it);
998 } else {
999 os << "I:Undefined";
1002 os << "|";
1005 // Ideally we'd like to display the contents of the class-ref slots here, but
1006 // we have no metadata to tell us which ones are currently occupied and valid.
1008 std::vector<std::string> stackElems;
1009 visitStackElems(
1010 fp, ftop, offset,
1011 [&](const ActRec* ar, Offset) {
1012 stackElems.push_back(
1013 folly::format("{{func:{}}}", ar->m_func->fullName()->data()).str()
1016 [&](const TypedValue* tv) {
1017 stackElems.push_back(toStringElm(tv));
// visitStackElems walks bottom-up; display top-of-stack last-to-first.
1020 std::reverse(stackElems.begin(), stackElems.end());
1021 os << ' ' << folly::join(' ', stackElems);
1023 os << '\n';
// Render the whole VM stack (all frames) for debugging; delegates the
// per-frame work to toStringFrame.
1026 std::string Stack::toString(const ActRec* fp, int offset,
1027 const std::string prefix/* = "" */) const {
1028 // The only way to figure out which stack elements are activation records is
1029 // to follow the frame chain. However, the goal for each stack frame is to
1030 // print stack fragments from deepest to shallowest -- a then b in the
1031 // following example:
1033 // {func:foo,callOff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
1034 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
1036 // Use depth-first recursion to get the output order correct.
1038 std::ostringstream os;
1039 auto unit = fp->unit();
1040 auto func = fp->func();
1041 os << prefix << "=== Stack at "
1042 << unit->filepath()->data() << ":"
1043 << unit->getLineNumber(unit->offsetOf(vmpc()))
1044 << " func " << func->fullName()->data() << " ===\n";
1046 toStringFrame(os, fp, offset, m_top, prefix);
1048 return os.str();
// Overflow check mirroring the translator's inline test: because the stack
// storage is size-aligned, masking m_top by (size-1) yields the element index,
// and pushing numCells must not dip into the surprise page.
1051 bool Stack::wouldOverflow(int numCells) const {
1052 // The funny approach here is to validate the translator's assembly
1053 // technique. We've aligned and sized the stack so that the high order
1054 // bits of valid cells are all the same. In the translator, numCells
1055 // can be hardcoded, and m_top is wired into a register,
1056 // so the expression requires no loads.
1057 intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
1058 truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
1059 intptr_t diff = truncatedTop - numCells -
1060 sSurprisePageSize / sizeof(TypedValue);
1061 return diff < 0;
1064 TypedValue* Stack::anyFrameStackBase(const ActRec* fp) {
1065 return fp->resumed() ? Stack::resumableStackBase(fp)
1066 : Stack::frameStackBase(fp);
// Base of a non-resumed frame's eval stack: just below its locals/iterators.
1069 TypedValue* Stack::frameStackBase(const ActRec* fp) {
1070 assertx(!fp->resumed());
1071 return (TypedValue*)fp - fp->func()->numSlotsInFrame();
// Base of a resumed frame's eval stack; the frame itself lives on the heap,
// so the base is derived from the caller chain (or the nesting record).
1074 TypedValue* Stack::resumableStackBase(const ActRec* fp) {
1075 assertx(fp->resumed());
1076 auto sfp = fp->sfp();
1077 if (sfp) {
1078 // The non-reentrant case occurs when a non-async or async generator is
1079 // resumed via ContEnter or ContRaise opcode. These opcodes leave a single
1080 // value on the stack that becomes part of the generator's stack. So we
1081 // find the caller's FP, compensate for its locals and iterators, and then
1082 // we've found the base of the generator's stack.
1083 assertx(fp->func()->isGenerator());
1085 // Since resumables are stored on the heap, we need to go back in the
1086 // callstack a bit to find the base of the stack. Unfortunately, due to
1087 // generator delegation, this can be pretty far back...
1088 while (sfp->func()->isGenerator()) {
1089 sfp = sfp->sfp();
1092 return (TypedValue*)sfp - sfp->func()->numSlotsInFrame();
1093 } else {
1094 // The reentrant case occurs when asio scheduler resumes an async function
1095 // or async generator. We simply use the top of stack of the previous VM
1096 // frame (since the ActRec, locals, and iters for this frame do not reside
1097 // on the VM stack).
1098 assertx(fp->func()->isAsync());
1099 return g_context.getNoCheck()->m_nestedVMs.back().sp;
Array getDefinedVariables(const ActRec* fp) {
  // Snapshot the named local variables of frame `fp` as a name => value map.
  if (UNLIKELY(fp == nullptr)) return empty_array();
  if ((fp->func()->attrs() & AttrMayUseVV) && fp->hasVarEnv()) {
    // A live VarEnv knows the full (possibly dynamically extended) set of
    // variables, so defer to it.
    return fp->m_varEnv->getDefinedVariables();
  auto const func = fp->m_func;
  auto const numLocals = func->numNamedLocals();
  ArrayInit ret(numLocals, ArrayInit::Map{});
  for (Id id = 0; id < numLocals; ++id) {
    TypedValue* ptv = frame_local(fp, id);
    if (ptv->m_type == KindOfUninit) {
      // Skip locals that were never assigned a value.
      continue;
    Variant name(func->localVarName(id), Variant::PersistentStrInit{});
    ret.add(name, tvAsVariant(ptv));
  return ret.toArray();
NEVER_INLINE
// Move arguments in excess of the function's non-variadic parameter count
// off the eval stack and into ExtraArgs and/or the variadic capture array,
// depending on the function's attributes.
static void shuffleExtraStackArgs(ActRec* ar) {
  const Func* func = ar->m_func;
  assertx(func);
  // the last (variadic) param is included in numParams (since it has a
  // name), but the arg in that slot should be included as the first
  // element of the variadic array
  const auto numArgs = ar->numArgs();
  const auto numVarArgs = numArgs - func->numNonVariadicParams();
  assertx(numVarArgs > 0);
  const auto takesVariadicParam = func->hasVariadicCaptureParam();
  auto& stack = vmStack();
  if (func->attrs() & AttrMayUseVV) {
    // Function may use func_get_args etc.: keep a copy in ExtraArgs.
    auto const tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs;
    ar->setExtraArgs(ExtraArgs::allocateCopy(tvArgs, numVarArgs));
    if (takesVariadicParam) {
      try {
        VArrayInit ai{numVarArgs};
        // Args sit below the ActRec in reverse order; append oldest first.
        for (uint32_t i = 0; i < numVarArgs; ++i) {
          ai.appendWithRef(*(tvArgs + numVarArgs - 1 - i));
        // Remove them from the stack
        stack.ndiscard(numVarArgs);
        if (RuntimeOption::EvalHackArrDVArrs) {
          stack.pushVecNoRc(ai.create());
        } else {
          stack.pushArrayNoRc(ai.create());
        // Before, for each arg: refcount = n + 1 (stack)
        // After, for each arg: refcount = n + 2 (ExtraArgs, varArgsArray)
      } catch (...) {
        // Roll back the ExtraArgs allocation if building the variadic
        // array threw, then rethrow for the unwinder.
        ExtraArgs::deallocateRaw(ar->getExtraArgs());
        ar->resetExtraArgs();
        throw;
    } else {
      // Discard the arguments from the stack; they were all moved
      // into the extra args so we don't decref.
      stack.ndiscard(numVarArgs);
    // leave ar->numArgs reflecting the actual number of args passed
  } else {
    assertx(takesVariadicParam); // called only if extra args are used
    // No ExtraArgs needed: move the surplus args straight into the
    // variadic capture array.
    auto tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs + numVarArgs - 1;
    VArrayInit ai{numVarArgs};
    for (uint32_t i = 0; i < numVarArgs; ++i) {
      ai.appendWithRef(*(tvArgs--));
    // Discard the arguments from the stack
    for (uint32_t i = 0; i < numVarArgs; ++i) stack.popTV();
    if (RuntimeOption::EvalHackArrDVArrs) {
      stack.pushVecNoRc(ai.create());
    } else {
      stack.pushArrayNoRc(ai.create());
    assertx(func->numParams() == (numArgs - numVarArgs + 1));
    ar->setNumArgs(func->numParams());
// Rewrite the stack for a __call/__callStatic dispatch: replace the pushed
// arguments with the two magic-method arguments (invoked method name and an
// array of the original arguments).
static void shuffleMagicArgs(ActRec* ar) {
  assertx(ar->magicDispatch());
  // We need to put this where the first argument is
  auto const invName = ar->clearMagicDispatch();
  int const nargs = ar->numArgs();
  // We need to make an array containing all the arguments passed by
  // the caller and put it where the second argument is.
  auto argArray = Array::attach(
    [&]{
      auto const args = reinterpret_cast<TypedValue*>(ar) - nargs;
      if (RuntimeOption::EvalHackArrDVArrs) {
        return nargs
          ? PackedArray::MakeVec(nargs, args)
          : staticEmptyVecArray();
      return nargs ? PackedArray::MakeVArray(nargs, args) : staticEmptyVArray();
  auto& stack = vmStack();
  // Remove the arguments from the stack; they were moved into the
  // array so we don't need to decref.
  stack.ndiscard(nargs);
  // Move invName to where the first argument belongs, no need
  // to incRef/decRef since we are transferring ownership
  stack.pushStringNoRc(invName);
  // Move argArray to where the second argument belongs. We've already
  // incReffed the array above so we don't need to do it here.
  if (RuntimeOption::EvalHackArrDVArrs) {
    stack.pushVecNoRc(argArray.detach());
  } else {
    stack.pushArrayNoRc(argArray.detach());
  // The magic method always receives exactly two arguments.
  ar->setNumArgs(2);
  ar->setVarEnv(nullptr);
// This helper is meant to be called if an exception or invalidation takes
// place in the process of function entry; the ActRec ar is on the stack
// but is not (yet) the current (executing) frame and is followed by a
// number of params
static NEVER_INLINE void cleanupParamsAndActRec(Stack& stack,
                                                ActRec* ar,
                                                ExtraArgs* extraArgs,
                                                int* numParams) {
  // Sanity check: the expected number of cells (from whichever source the
  // caller provided) must sit between stack top and the ActRec.
  assertx(stack.top() + (numParams != nullptr ? (*numParams) :
                         extraArgs != nullptr ? ar->m_func->numParams() :
                         ar->numArgs())
          == (void*)ar);
  if (extraArgs) {
    const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
    ExtraArgs::deallocate(extraArgs, numExtra);
  // Pop (and decref) every param cell, then the ActRec itself.
  while (stack.top() != (void*)ar) {
    stack.popTV();
  stack.popAR();
// Like shuffleMagicArgs, but for calls whose arguments arrive partly on the
// stack (`nregular` cells) and partly in a container (`args`, e.g. from an
// argument unpack): build the magic-method (name, args-array) pair.
static NEVER_INLINE void shuffleMagicArrayArgs(ActRec* ar, const Cell args,
                                               Stack& stack, int nregular) {
  assertx(ar != nullptr && ar->magicDispatch());
  assertx(!cellIsNull(&args));
  assertx(nregular >= 0);
  assertx((stack.top() + nregular) == (void*) ar);
  assertx(isContainer(args));
  DEBUG_ONLY const Func* f = ar->m_func;
  assertx(f &&
          (f->name()->isame(s___call.get()) ||
           f->name()->isame(s___callStatic.get())));
  // We'll need to make this the first argument
  auto const invName = ar->clearMagicDispatch();
  auto nargs = getContainerSize(args);
  if (UNLIKELY(0 == nargs)) {
    // Container is empty: only the regular stack args go into the array.
    // We need to make an array containing all the arguments passed by
    // the caller and put it where the second argument is.
    auto argArray = Array::attach(
      [&]{
        auto const args = reinterpret_cast<TypedValue*>(ar) - nregular;
        if (RuntimeOption::EvalHackArrDVArrs) {
          return nregular
            ? PackedArray::MakeVec(nregular, args)
            : staticEmptyVecArray();
        return nregular
          ? PackedArray::MakeVArray(nregular, args)
          : staticEmptyVArray();
    // Remove the arguments from the stack; they were moved into the
    // array so we don't need to decref.
    stack.ndiscard(nregular);
    // Move invName to where the first argument belongs, no need
    // to incRef/decRef since we are transferring ownership
    assertx(stack.top() == (void*) ar);
    stack.pushStringNoRc(invName);
    // Move argArray to where the second argument belongs. We've already
    // incReffed the array above so we don't need to do it here.
    if (RuntimeOption::EvalHackArrDVArrs) {
      stack.pushVecNoRc(argArray.detach());
    } else {
      stack.pushArrayNoRc(argArray.detach());
  } else {
    if (nregular == 0 &&
        !RuntimeOption::EvalHackArrDVArrs &&
        isArrayType(args.m_type) &&
        args.m_data.parr->isVArray()) {
      // Fast path: no stack args and `args` is already a varray — reuse it.
      assertx(stack.top() == (void*) ar);
      stack.pushStringNoRc(invName);
      stack.pushArray(args.m_data.parr);
    } else if (nregular == 0 &&
               RuntimeOption::EvalHackArrDVArrs &&
               isVecType(args.m_type)) {
      // Same fast path for the vec-flavored world.
      assertx(stack.top() == (void*) ar);
      stack.pushStringNoRc(invName);
      stack.pushVec(args.m_data.parr);
    } else {
      // Slow path: merge stack args and container args into a fresh array.
      VArrayInit ai(nargs + nregular);
      // The arguments are pushed in order, so we should refer them by
      // index instead of taking the top, that would lead to reverse order.
      for (int i = nregular - 1; i >= 0; --i) {
        // appendWithRef bumps the refcount and splits if necessary,
        // to compensate for the upcoming pop from the stack
        ai.appendWithRef(tvAsVariant(stack.indTV(i)));
      for (int i = 0; i < nregular; ++i) {
        stack.popTV();
      assertx(stack.top() == (void*) ar);
      stack.pushStringNoRc(invName);
      for (ArrayIter iter(args); iter; ++iter) {
        ai.appendWithRef(iter.secondValPlus());
      if (RuntimeOption::EvalHackArrDVArrs) {
        stack.pushVecNoRc(ai.create());
      } else {
        stack.pushArrayNoRc(ai.create());
  // The magic method always receives exactly two arguments.
  ar->setNumArgs(2);
  ar->setVarEnv(nullptr);
// offset is the number of params already on the stack to which the
// contents of args are to be added; for call_user_func_array, this is
// always 0; for unpacked arguments, it may be greater if normally passed
// params precede the unpack.
//
// Spreads the container `args` onto the eval stack (after `nregular`
// already-pushed args) and routes any surplus into ExtraArgs and/or the
// variadic capture array, mirroring shuffleExtraStackArgs.
bool prepareArrayArgs(ActRec* ar, const Cell args, Stack& stack,
                      int nregular, TypedValue* retval, bool checkRefAnnot) {
  assertx(!cellIsNull(&args));
  assertx(nregular >= 0);
  assertx((stack.top() + nregular) == (void*) ar);
  const Func* const f = ar->m_func;
  assertx(f);
  assertx(isContainer(args));
  int const nargs = nregular + getContainerSize(args);
  if (UNLIKELY(ar->magicDispatch())) {
    // __call/__callStatic dispatch rewrites the stack completely.
    shuffleMagicArrayArgs(ar, args, stack, nregular);
    return true;
#define WRAP(e)                                                        \
  try {                                                                \
    e;                                                                 \
  } catch (...) {                                                      \
    /* If the user error handler throws an exception, discard the
     * uninitialized value(s) at the top of the eval stack so that the
     * unwinder doesn't choke */                                       \
    stack.discard();                                                   \
    if (retval) { tvWriteNull(*retval); }                              \
    throw;                                                             \
  // Whether the (non-builtin) caller is Hack code, which determines if
  // by-ref annotation mismatches should raise.
  auto const calledFromHH = [] {
    if (RuntimeOption::EnableHipHopSyntax) return true;
    auto fp = vmfp();
    while (fp && fp->func()->isBuiltin()) {
      fp = g_context->getPrevVMState(fp);
    return fp && fp->func()->unit()->isHHFile();
  int const nparams = f->numNonVariadicParams();
  int nextra_regular = std::max(nregular - nparams, 0);
  ArrayIter iter(args);
  if (LIKELY(nextra_regular == 0)) {
    // Fill remaining declared params from the container, honoring by-ref
    // parameter annotations.
    for (int i = nregular; iter && (i < nparams); ++i, ++iter) {
      auto const from = iter.secondValPlus();
      TypedValue* to = stack.allocTV();
      if (LIKELY(!f->byRef(i))) {
        cellDup(tvToCell(from), *to);
      } else if (LIKELY(isRefType(from.m_type) &&
                        from.m_data.pref->hasMultipleRefs())) {
        if (checkRefAnnot && calledFromHH()) {
          WRAP(raiseParamRefMismatchForFunc(f, i));
        refDup(from, *to);
      } else {
        if (checkRefAnnot && calledFromHH()) {
          WRAP(raiseParamRefMismatchForFunc(f, i));
        cellDup(tvToCell(from), *to);
    if (LIKELY(!iter)) {
      // argArray was exhausted, so there are no "extra" arguments but there
      // may be a deficit of non-variadic arguments, and the need to push an
      // empty array for the variadic argument ... that work is left to
      // prepareFuncEntry. Since the stack state is going to be considered
      // "trimmed" over there, we need to null the extraArgs/varEnv field if
      // the function could read it.
      ar->setNumArgs(nargs);
      ar->trashVarEnv();
      if (!debug || (ar->func()->attrs() & AttrMayUseVV)) {
        ar->setVarEnv(nullptr);
      return true;
#undef WRAP
  // there are "extra" arguments; passed as standard arguments prior to the
  // ... unpack operator and/or still remaining in argArray
  assertx(nargs > nparams);
  assertx(nextra_regular > 0 || !!iter);
  if (LIKELY(f->discardExtraArgs())) {
    if (UNLIKELY(nextra_regular > 0)) {
      // if unpacking, any regularly passed arguments on the stack
      // in excess of those expected by the function need to be discarded
      // in addition to the ones held in the array
      do { stack.popTV(); } while (--nextra_regular);
    // the extra args are not used in the function; no reason to add them
    // to the stack
    ar->setNumArgs(f->numParams());
    return true;
  auto const hasVarParam = f->hasVariadicCaptureParam();
  auto const extra = nargs - nparams;
  if (f->attrs() & AttrMayUseVV) {
    // Function may use func_get_args etc.: keep the extras in ExtraArgs
    // (and, if declared, also in the variadic capture array).
    ExtraArgs* extraArgs = ExtraArgs::allocateUninit(extra);
    VArrayInit ai(extra);
    if (UNLIKELY(nextra_regular > 0)) {
      // The arguments are pushed in order, so we should refer them by
      // index instead of taking the top, that would lead to reverse order.
      for (int i = nextra_regular - 1; i >= 0; --i) {
        TypedValue* to = extraArgs->getExtraArg(nextra_regular - i - 1);
        const TypedValue* from = stack.indTV(i);
        if (isRefType(from->m_type) && from->m_data.pref->isReferenced()) {
          refCopy(*from, *to);
        } else {
          cellCopy(*tvToCell(from), *to);
        if (hasVarParam) {
          // appendWithRef bumps the refcount: this accounts for the fact
          // that the extra args values went from being present on the stack
          // to being in (both) ExtraArgs and the variadic args
          ai.appendWithRef(tvAsCVarRef(from));
      stack.ndiscard(nextra_regular);
    for (int i = nextra_regular; i < extra; ++i, ++iter) {
      TypedValue* to = extraArgs->getExtraArg(i);
      auto const from = iter.secondValPlus();
      tvDupWithRef(from, *to);
      if (hasVarParam) {
        ai.appendWithRef(from);
    assertx(!iter); // iter should now be exhausted
    if (hasVarParam) {
      auto const ad = ai.create();
      assertx(ad->hasExactlyOneRef());
      if (RuntimeOption::EvalHackArrDVArrs) {
        stack.pushVecNoRc(ad);
      } else {
        stack.pushArrayNoRc(ad);
    ar->setNumArgs(nargs);
    ar->setExtraArgs(extraArgs);
  } else {
    assertx(hasVarParam);
    if (nparams == nregular &&
        !RuntimeOption::EvalHackArrDVArrs &&
        isArrayType(args.m_type) &&
        args.m_data.parr->isVArray()) {
      // Fast path: the whole container becomes the variadic array as-is.
      stack.pushArray(args.m_data.parr);
    } else if (nparams == nregular &&
               RuntimeOption::EvalHackArrDVArrs &&
               isVecType(args.m_type)) {
      stack.pushVec(args.m_data.parr);
    } else {
      VArrayInit ai(extra);
      if (UNLIKELY(nextra_regular > 0)) {
        // The arguments are pushed in order, so we should refer them by
        // index instead of taking the top, that would lead to reverse order.
        for (int i = nextra_regular - 1; i >= 0; --i) {
          // appendWithRef bumps the refcount and splits if necessary,
          // to compensate for the upcoming pop from the stack
          ai.appendWithRef(tvAsVariant(stack.indTV(i)));
        for (int i = 0; i < nextra_regular; ++i) {
          stack.popTV();
      for (int i = nextra_regular; i < extra; ++i, ++iter) {
        // appendWithRef bumps the refcount to compensate for the
        // eventual decref of arrayArgs.
        ai.appendWithRef(iter.secondValPlus());
      assertx(!iter); // iter should now be exhausted
      auto const ad = ai.create();
      assertx(ad->hasExactlyOneRef());
      if (RuntimeOption::EvalHackArrDVArrs) {
        stack.pushVecNoRc(ad);
      } else {
        stack.pushArrayNoRc(ad);
    ar->setNumArgs(f->numParams());
  return true;
// Finish setting up a frame before executing its body: shuffle magic/extra
// args, push missing-arg placeholders and the variadic array, materialize
// closure and reified-generics locals, push locals/iterators, and point
// vmfp/vmpc at the new frame.
static void prepareFuncEntry(ActRec *ar, StackArgsState stk) {
  assertx(!ar->resumed());
  const Func* func = ar->m_func;
  Offset firstDVInitializer = InvalidAbsoluteOffset;
  bool raiseMissingArgumentWarnings = false;
  folly::Optional<uint32_t> raiseTooManyArgumentsWarnings;
  const int nparams = func->numNonVariadicParams();
  auto& stack = vmStack();
  ArrayData* reified_generics = nullptr;
  if (ar->m_func->hasReifiedGenerics()) {
    if (ar->hasReifiedGenerics()) {
      // This means that the first local is $0ReifiedGenerics
      reified_generics = ar->getReifiedGenerics();
    // Clear the flag bits; the generics get pushed as a local below.
    ar->trashReifiedGenerics();
  if (stk == StackArgsState::Trimmed &&
      (ar->func()->attrs() & AttrMayUseVV) &&
      ar->hasExtraArgs()) {
    // Extra args were already moved off the stack by the caller.
    assertx(nparams < ar->numArgs());
  } else if (UNLIKELY(ar->magicDispatch())) {
    // shuffleMagicArgs deals with everything. no need for further
    // argument munging
    shuffleMagicArgs(ar);
  } else {
    int nargs = ar->numArgs();
    if (UNLIKELY(nargs > nparams)) {
      if (LIKELY(stk != StackArgsState::Trimmed && func->discardExtraArgs())) {
        // In the common case, the function won't use the extra arguments,
        // so act as if they were never passed (NOTE: this has the effect
        // of slightly misleading backtraces that don't reflect the
        // discarded args)
        for (int i = nparams; i < nargs; ++i) { stack.popTV(); }
        ar->setNumArgs(nparams);
      } else if (stk == StackArgsState::Trimmed) {
        assertx(nargs == func->numParams());
        assertx(((TypedValue*)ar - stack.top()) == func->numParams());
      } else {
        shuffleExtraStackArgs(ar);
      raiseTooManyArgumentsWarnings = nargs;
    } else {
      if (nargs < nparams) {
        // Push uninitialized nulls for missing arguments. Some of them may
        // end up getting default-initialized, but regardless, we need to
        // make space for them on the stack.
        const Func::ParamInfoVec& paramInfo = func->params();
        for (int i = nargs; i < nparams; ++i) {
          stack.pushUninit();
          Offset dvInitializer = paramInfo[i].funcletOff;
          if (dvInitializer == InvalidAbsoluteOffset) {
            // We wait to raise warnings until after all the locals have been
            // initialized. This is important because things need to be in a
            // consistent state in case the user error handler throws.
            raiseMissingArgumentWarnings = true;
          } else if (firstDVInitializer == InvalidAbsoluteOffset) {
            // This is the first unpassed arg with a default value, so
            // this is where we'll need to jump to.
            firstDVInitializer = dvInitializer;
      if (UNLIKELY(func->hasVariadicCaptureParam())) {
        // No surplus args, so the variadic param captures an empty array.
        if (RuntimeOption::EvalHackArrDVArrs) {
          stack.pushVecNoRc(staticEmptyVecArray());
        } else {
          stack.pushArrayNoRc(staticEmptyVArray());
      if (func->attrs() & AttrMayUseVV) {
        ar->setVarEnv(nullptr);
  int nlocals = func->numParams();
  if (UNLIKELY(func->isClosureBody())) {
    int nuse = init_closure(ar, stack.top());
    // init_closure doesn't move stack
    stack.nalloc(nuse);
    nlocals += nuse;
    func = ar->m_func;
  if (ar->m_func->hasReifiedGenerics()) {
    // Currently does not work with closures
    assertx(!func->isClosureBody());
    if (!ar->hasReifiedGenerics()) {
      stack.pushUninit();
    } else {
      assertx(reified_generics != nullptr);
      // push for first local
      if (RuntimeOption::EvalHackArrDVArrs) {
        stack.pushStaticVec(reified_generics);
      } else {
        stack.pushStaticArray(reified_generics);
    nlocals++;
  pushFrameSlots(func, nlocals);
  vmfp() = ar;
  vmpc() = firstDVInitializer != InvalidAbsoluteOffset
    ? func->unit()->entry() + firstDVInitializer
    : func->getEntry();
  vmJitReturnAddr() = nullptr;
  // cppext functions/methods have their own logic for raising
  // warnings for missing arguments, so we only need to do this work
  // for non-cppext functions/methods
  if (raiseMissingArgumentWarnings && !func->isCPPBuiltin()) {
    HPHP::jit::raiseMissingArgument(func, ar->numArgs());
  if (raiseTooManyArgumentsWarnings && !func->isCPPBuiltin()) {
    // since shuffleExtraStackArgs changes ar->numArgs() we need to communicate
    // the value before it gets changed
    HPHP::jit::raiseTooManyArguments(func, *raiseTooManyArgumentsWarnings);
1656 namespace {
// Check whether HasReifiedGenerics is set on the ActRec
// Check whether the location of reified generics matches the one we expect
void checkForReifiedGenericsErrors(const ActRec* ar) {
  if (!ar->m_func->hasReifiedGenerics()) return;
  if (!ar->hasReifiedGenerics()) {
    // raise_error does not return.
    raise_error(Strings::REIFIED_GENERICS_NOT_GIVEN);
  // The generics live in the first local past the declared params.
  auto const tv = frame_local(ar, ar->m_func->numParams());
  assertx(tv && (RuntimeOption::EvalHackArrDVArrs ? tvIsVec(tv)
                                                  : tvIsArray(tv)));
  checkFunReifiedGenericMismatch(ar->m_func, tv->m_data.parr);
1669 } // namespace
1671 static void dispatch();
// Enter the VM at the start of function `enterFnAr`, via the JIT prologue
// when possible, otherwise through the interpreter's prepareFuncEntry path.
void enterVMAtFunc(ActRec* enterFnAr, StackArgsState stk, VarEnv* varEnv) {
  assertx(enterFnAr);
  assertx(!enterFnAr->resumed());
  Stats::inc(Stats::VMEnter);
  const bool useJit = RID().getJit() && !RID().getJitFolding();
  const bool useJitPrologue = useJit && vmfp()
    && !enterFnAr->magicDispatch()
    && !varEnv
    && (stk != StackArgsState::Trimmed);
  // The jit prologues only know how to do limited amounts of work; cannot
  // be used for magic call/pseudo-main/extra-args already determined or
  // ... or if the stack args have been explicitly been prepared (e.g. via
  // entry as part of invoke func).
  if (LIKELY(useJitPrologue)) {
    const int np = enterFnAr->m_func->numNonVariadicParams();
    int na = enterFnAr->numArgs();
    // Prologues only distinguish up to np+1 arg counts; clamp.
    if (na > np) na = np + 1;
    jit::TCA start = enterFnAr->m_func->getPrologue(na);
    jit::enterTCAtPrologue(enterFnAr, start);
    return;
  if (UNLIKELY(varEnv != nullptr)) {
    // Pseudo-main entry: wire up the supplied VarEnv instead of locals.
    enterFnAr->setVarEnv(varEnv);
    assertx(enterFnAr->func()->isPseudoMain());
    pushFrameSlots(enterFnAr->func());
    auto oldFp = vmfp();
    if (UNLIKELY(oldFp && oldFp->skipFrame())) {
      oldFp = g_context->getPrevVMStateSkipFrame(oldFp);
    varEnv->enterFP(oldFp, enterFnAr);
    vmfp() = enterFnAr;
    vmpc() = enterFnAr->func()->getEntry();
  } else {
    prepareFuncEntry(enterFnAr, stk);
  // Intercept/surprise handling may abort the call entirely.
  if (!EventHook::FunctionCall(enterFnAr, EventHook::NormalFunc)) return;
  checkStack(vmStack(), enterFnAr->m_func, 0);
  checkForReifiedGenericsErrors(enterFnAr);
  calleeDynamicCallChecks(enterFnAr);
  checkForRequiredCallM(enterFnAr);
  assertx(vmfp()->func()->contains(vmpc()));
  if (useJit) {
    jit::TCA start = enterFnAr->m_func->getFuncBody();
    assert_flog(jit::tc::isValidCodeAddress(start),
                "start = {} ; func = {} ({})\n",
                start, enterFnAr->m_func, enterFnAr->m_func->fullName());
    jit::enterTCAfterPrologue(start);
  } else {
    dispatch();
// Resume VM execution at the current vmpc(), in the JIT if enabled,
// otherwise in the bytecode interpreter.
void enterVMAtCurPC() {
  assertx(vmfp());
  assertx(vmpc());
  assertx(vmfp()->func()->contains(vmpc()));
  Stats::inc(Stats::VMEnter);
  if (RID().getJit()) {
    jit::enterTC();
  } else {
    dispatch();
1743 * Helper for function entry, including pseudo-main entry.
1745 void pushFrameSlots(const Func* func, int nparams /*= 0*/) {
1746 // Push locals.
1747 for (int i = nparams; i < func->numLocals(); i++) {
1748 vmStack().pushUninit();
1750 // Push iterators.
1751 for (int i = 0; i < func->numIterators(); i++) {
1752 vmStack().allocI();
1754 vmStack().allocClsRefSlots(func->numClsRefSlots());
// During unwinding, smash ar's saved return address so the frame returns to
// an interpreter helper stub instead of back into jitted code.
void unwindPreventReturnToTC(ActRec* ar) {
  auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
  always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
                     "preventReturnToTC({}): {} isn't in TC",
                     ar, savedRip);
  // Already routed through a return helper; nothing to do.
  if (isReturnHelper(savedRip)) return;
  auto& ustubs = jit::tc::ustubs();
  if (ar->resumed()) {
    // async functions use callToExit stub
    assertx(ar->func()->isGenerator());
    ar->setJitReturn(ar->func()->isAsync()
      ? ustubs.asyncGenRetHelper : ustubs.genRetHelper);
  } else {
    ar->setJitReturn(ustubs.retHelper);
// Like unwindPreventReturnToTC, but for the debugger: uses the debugger
// return stubs and stashes the frame's catch block first.
void debuggerPreventReturnToTC(ActRec* ar) {
  auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
  always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
                     "preventReturnToTC({}): {} isn't in TC",
                     ar, savedRip);
  if (isReturnHelper(savedRip) || isDebuggerReturnHelper(savedRip)) return;
  // We're going to smash the return address. Before we do, save the catch
  // block attached to the call in a side table so the return helpers and
  // unwinder can find it when needed.
  jit::stashDebuggerCatch(ar);
  auto& ustubs = jit::tc::ustubs();
  if (ar->resumed()) {
    // async functions use callToExit stub
    assertx(ar->func()->isGenerator());
    ar->setJitReturn(ar->func()->isAsync()
      ? ustubs.debuggerAsyncGenRetHelper : ustubs.debuggerGenRetHelper);
  } else {
    ar->setJitReturn(ustubs.debuggerRetHelper);
// Walk the stack and find any return address to jitted code and bash it to the
// appropriate RetFromInterpreted*Frame helper. This ensures that we don't
// return into jitted code and gives the system the proper chance to interpret
// blacklisted tracelets.
void debuggerPreventReturnsToTC() {
  assertx(isDebuggerAttached());
  if (!RuntimeOption::EvalJit) return;
  auto& ec = *g_context;
  for (auto ar = vmfp(); ar; ar = ec.getPrevVMState(ar)) {
    debuggerPreventReturnToTC(ar);
// Normalize a TypedValue into a StringData* key suitable for name lookups.
static inline StringData* lookup_name(TypedValue* key) {
  return prepareKey(*key);
// Look up a global variable by `key`; on return `name` is the normalized
// name and `val` points to the global's storage (nullptr if undefined).
static inline void lookup_gbl(ActRec* /*fp*/, StringData*& name,
                              TypedValue* key, TypedValue*& val) {
  name = lookup_name(key);
  assertx(g_context->m_globalVarEnv);
  val = g_context->m_globalVarEnv->lookup(name);
// Like lookup_gbl, but defines the global as null first if it does not
// already exist, so `val` is always non-null on return.
static inline void lookupd_gbl(ActRec* /*fp*/, StringData*& name,
                               TypedValue* key, TypedValue*& val) {
  name = lookup_name(key);
  assertx(g_context->m_globalVarEnv);
  VarEnv* varEnv = g_context->m_globalVarEnv;
  val = varEnv->lookup(name);
  if (val == nullptr) {
    // Define the global as null, then re-look it up to get stable storage.
    TypedValue tv;
    tvWriteNull(tv);
    varEnv->set(name, &tv);
    val = varEnv->lookup(name);
// Resolve static property cls::$key from the calling context of `fp`.
// Outputs the normalized name, the property storage (nullptr if not
// visible), its slot, and visibility/accessibility flags.
static inline void lookup_sprop(ActRec* fp,
                                Class* cls,
                                StringData*& name,
                                TypedValue* key,
                                TypedValue*& val,
                                Slot& slot,
                                bool& visible,
                                bool& accessible,
                                bool ignoreLateInit) {
  name = lookup_name(key);
  auto const ctx = arGetContextClass(fp);
  auto const lookup = ignoreLateInit
    ? cls->getSPropIgnoreLateInit(ctx, name)
    : cls->getSProp(ctx, name);
  val = lookup.val;
  slot = lookup.slot;
  visible = lookup.val != nullptr;
  accessible = lookup.accessible;
// Convert a cell holding a string class name, an object, or a class into a
// Class*. Raises a fatal error for unknown class names or other cell types.
static inline Class* lookupClsRef(Cell* input) {
  Class* class_ = nullptr;
  if (isStringType(input->m_type)) {
    // Autoload/define the class by name if needed.
    class_ = Unit::loadClass(input->m_data.pstr);
    if (class_ == nullptr) {
      raise_error(Strings::UNKNOWN_CLASS, input->m_data.pstr->data());
  } else if (input->m_type == KindOfObject) {
    class_ = input->m_data.pobj->getVMClass();
  } else if (isClassType(input->m_type)) {
    class_ = input->m_data.pclass;
  } else {
    raise_error("Cls: Expected string or object");
  return class_;
1878 static UNUSED int innerCount(TypedValue tv) {
1879 return isRefcountedType(tv.m_type) ? tvGetCount(tv) : -1;
// Rotate the two member-op temporary-reference slots between intermediate
// dim operations, keeping whatever `result` points at alive for one more
// iteration. Returns the (possibly relocated) result lval.
static inline tv_lval ratchetRefs(tv_lval result,
                                  TypedValue& tvRef,
                                  TypedValue& tvRef2) {
  TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
        &val(result), static_cast<data_type_t>(result.type()),
        innerCount(*result),
        &tvRef, static_cast<data_type_t>(tvRef.m_type), innerCount(tvRef),
        &tvRef2, static_cast<data_type_t>(tvRef2.m_type), innerCount(tvRef2));
  // Due to complications associated with ArrayAccess, it is possible to acquire
  // a reference as a side effect of vector operation processing. Such a
  // reference must be retained until after the next iteration is complete.
  // Therefore, move the reference from tvRef to tvRef2, so that the reference
  // will be released one iteration later. But only do this if tvRef was used in
  // this iteration, otherwise we may wipe out the last reference to something
  // that we need to stay alive until the next iteration.
  if (tvRef.m_type != KindOfUninit) {
    if (isRefcountedType(tvRef2.m_type)) {
      tvDecRefCountable(&tvRef2);
      TRACE(5, "Ratchet: decref tvref2\n");
      tvWriteUninit(tvRef2);
    memcpy(&tvRef2, &tvRef, sizeof(TypedValue));
    tvWriteUninit(tvRef);
    // Update result to point to relocated reference. This can be done
    // unconditionally here because we maintain the invariant throughout that
    // either tvRef is KindOfUninit, or tvRef contains a valid object that
    // result points to.
    assertx(&val(result) == &tvRef.m_data);
    return tv_lval(&tvRef2);
  assertx(&val(result) != &tvRef.m_data);
  return result;
1919 * One iop* function exists for every bytecode. They all take a single PC&
1920 * argument, which should be left pointing to the next bytecode to execute when
1921 * the instruction is complete. Most return void, though a few return a
1922 * jit::TCA. The ones that return a TCA return a non-nullptr value to indicate
1923 * that the caller must resume execution in the TC at the returned
1924 * address. This is used to maintain certain invariants about how we get into
1925 * and out of VM frames in jitted code; see comments on jitReturnPre() for more
1926 * details.
// No-op instruction.
OPTBLD_INLINE void iopNop() {
// No-op marker emitted at function entry.
OPTBLD_INLINE void iopEntryNop() {
// Release a class-ref slot without using its contents.
OPTBLD_INLINE void iopDiscardClsRef(clsref_slot slot) {
  slot.take();
// Pop a cell off the eval stack.
OPTBLD_INLINE void iopPopC() {
  vmStack().popC();
// Pop a ref off the eval stack.
OPTBLD_INLINE void iopPopV() {
  vmStack().popV();
// Pop an uninit value off the eval stack.
OPTBLD_INLINE void iopPopU() {
  vmStack().popU();
// Remove the Uninit cell directly beneath the top of stack, keeping the
// top value in place.
OPTBLD_INLINE void iopPopU2() {
  assertx(vmStack().indC(1)->m_type == KindOfUninit);
  *vmStack().indC(1) = *vmStack().topC();
  vmStack().discard();
// Pop the top cell into local `to`.
OPTBLD_INLINE void iopPopL(local_var to) {
  assertx(to.index < vmfp()->m_func->numLocals());
  Cell* fr = vmStack().topC();
  if (isRefType(to->m_type) || vmfp()->m_func->isPseudoMain()) {
    // Manipulate the ref-counts as if this was a SetL, PopC pair to preserve
    // destructor ordering.
    tvSet(*fr, *to);
    vmStack().popC();
  } else {
    // Plain local: transfer the value without refcount traffic.
    cellMove(*fr, *to);
    vmStack().discard();
// Duplicate the top of stack.
OPTBLD_INLINE void iopDup() {
  vmStack().dup();
// Box the top of stack into a ref.
OPTBLD_INLINE void iopBox() {
  vmStack().box();
// Unbox the ref on top of the stack into a cell.
OPTBLD_INLINE void iopUnbox() {
  vmStack().unbox();
// Type-assertion no-ops: the value on the stack is already in the
// asserted flavor.
OPTBLD_INLINE void iopCGetCUNop() {
OPTBLD_INLINE void iopUGetCUNop() {
// Push null.
OPTBLD_INLINE void iopNull() {
  vmStack().pushNull();
// Push uninitialized null.
OPTBLD_INLINE void iopNullUninit() {
  vmStack().pushNullUninit();
// Push boolean true.
OPTBLD_INLINE void iopTrue() {
  vmStack().pushBool(true);
// Push boolean false.
OPTBLD_INLINE void iopFalse() {
  vmStack().pushBool(false);
// Push the current unit's file path (__FILE__).
OPTBLD_INLINE void iopFile() {
  auto s = vmfp()->m_func->unit()->filepath();
  vmStack().pushStaticString(s);
// Push the current unit's directory (__DIR__).
OPTBLD_INLINE void iopDir() {
  auto s = vmfp()->m_func->unit()->dirpath();
  vmStack().pushStaticString(s);
// Push the current function's fully qualified name (__METHOD__).
OPTBLD_INLINE void iopMethod() {
  auto s = vmfp()->m_func->fullName();
  vmStack().pushStaticString(s);
// Consume a class-ref slot and push the class's name.
OPTBLD_INLINE void iopClsRefName(clsref_slot slot) {
  auto const cls = slot.take().second;
  auto const name = cls->name();
  vmStack().pushStaticString(name);
// Push an integer literal.
OPTBLD_INLINE void iopInt(int64_t imm) {
  vmStack().pushInt(imm);
// Push a double literal.
OPTBLD_INLINE void iopDouble(double imm) {
  vmStack().pushDouble(imm);
// Push a static string literal.
OPTBLD_INLINE void iopString(const StringData* s) {
  vmStack().pushStaticString(s);
// Push a static PHP-array literal.
OPTBLD_INLINE void iopArray(const ArrayData* a) {
  assertx(a->isPHPArray());
  assertx(!RuntimeOption::EvalHackArrDVArrs || a->isNotDVArray());
  vmStack().pushStaticArray(a);
// Push a static dict literal.
OPTBLD_INLINE void iopDict(const ArrayData* a) {
  assertx(a->isDict());
  vmStack().pushStaticDict(a);
// Push a static keyset literal.
OPTBLD_INLINE void iopKeyset(const ArrayData* a) {
  assertx(a->isKeyset());
  vmStack().pushStaticKeyset(a);
// Push a static vec literal.
OPTBLD_INLINE void iopVec(const ArrayData* a) {
  assertx(a->isVecArray());
  vmStack().pushStaticVec(a);
2059 OPTBLD_INLINE void iopNewArray(uint32_t capacity) {
2060 if (capacity == 0) {
2061 vmStack().pushArrayNoRc(staticEmptyArray());
2062 } else {
2063 vmStack().pushArrayNoRc(PackedArray::MakeReserve(capacity));
2067 OPTBLD_INLINE void iopNewMixedArray(uint32_t capacity) {
2068 if (capacity == 0) {
2069 vmStack().pushArrayNoRc(staticEmptyArray());
2070 } else {
2071 vmStack().pushArrayNoRc(MixedArray::MakeReserveMixed(capacity));
2075 OPTBLD_INLINE void iopNewDictArray(uint32_t capacity) {
2076 if (capacity == 0) {
2077 vmStack().pushDictNoRc(staticEmptyDictArray());
2078 } else {
2079 vmStack().pushDictNoRc(MixedArray::MakeReserveDict(capacity));
// NewLikeArrayL: push a fresh reserved array modeled on local `fr`. If the
// local holds an array, reserve an array of the same kind; otherwise fall
// back to a packed reserve (with a minimum small size when capacity is 0).
OPTBLD_INLINE
void iopNewLikeArrayL(local_var fr, uint32_t capacity) {
  ArrayData* arr;
  if (LIKELY(isArrayType(fr->m_type))) {
    arr = MixedArray::MakeReserveLike(fr->m_data.parr, capacity);
  } else {
    if (capacity == 0) capacity = PackedArray::SmallSize;
    arr = PackedArray::MakeReserve(capacity);
  vmStack().pushArrayNoRc(arr);
// NewPackedArray: pop the top n cells and push a packed array built from
// them (stack order preserved).
OPTBLD_INLINE void iopNewPackedArray(uint32_t n) {
  // This constructor moves values, no inc/decref is necessary.
  auto* a = PackedArray::MakePacked(n, vmStack().topC());
  vmStack().ndiscard(n);
  vmStack().pushArrayNoRc(a);
2102 namespace {
// Shared helper for the NewStruct* opcodes: resolve the n literal-string
// keys from the current unit, then let `f` construct the struct-like array
// from those keys and the top n stack cells, which are then discarded.
template <typename F>
ArrayData* newStructArrayImpl(imm_array<int32_t> ids, F f) {
  auto const n = ids.size;
  assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
  req::vector<const StringData*> names;
  names.reserve(n);
  auto unit = vmfp()->m_func->unit();
  for (size_t i = 0; i < n; ++i) {
    auto name = unit->lookupLitstrId(ids[i]);
    names.push_back(name);
  // This constructor moves values, no inc/decref is necessary.
  auto const a = f(n, names.data(), vmStack().topC())->asArrayData();
  vmStack().ndiscard(n);
  return a;
2124 OPTBLD_INLINE void iopNewStructArray(imm_array<int32_t> ids) {
2125 auto const a = newStructArrayImpl(ids, MixedArray::MakeStruct);
2126 vmStack().pushArrayNoRc(a);
2129 OPTBLD_INLINE void iopNewStructDArray(imm_array<int32_t> ids) {
2130 assertx(!RuntimeOption::EvalHackArrDVArrs);
2131 auto const a = newStructArrayImpl(ids, MixedArray::MakeStructDArray);
2132 vmStack().pushArrayNoRc(a);
2135 OPTBLD_INLINE void iopNewStructDict(imm_array<int32_t> ids) {
2136 auto const a = newStructArrayImpl(ids, MixedArray::MakeStructDict);
2137 vmStack().pushDictNoRc(a);
2140 OPTBLD_INLINE void iopNewVecArray(uint32_t n) {
2141 // This constructor moves values, no inc/decref is necessary.
2142 auto* a = PackedArray::MakeVec(n, vmStack().topC());
2143 vmStack().ndiscard(n);
2144 vmStack().pushVecNoRc(a);
2147 OPTBLD_INLINE void iopNewKeysetArray(uint32_t n) {
2148 // This constructor moves values, no inc/decref is necessary.
2149 auto* a = SetArray::MakeSet(n, vmStack().topC());
2150 vmStack().ndiscard(n);
2151 vmStack().pushKeysetNoRc(a);
2154 OPTBLD_INLINE void iopNewVArray(uint32_t n) {
2155 assertx(!RuntimeOption::EvalHackArrDVArrs);
2156 // This constructor moves values, no inc/decref is necessary.
2157 auto a = PackedArray::MakeVArray(n, vmStack().topC());
2158 vmStack().ndiscard(n);
2159 vmStack().pushArrayNoRc(a);
2162 OPTBLD_INLINE void iopNewDArray(uint32_t capacity) {
2163 assertx(!RuntimeOption::EvalHackArrDVArrs);
2164 if (capacity == 0) {
2165 vmStack().pushArrayNoRc(staticEmptyDArray());
2166 } else {
2167 vmStack().pushArrayNoRc(MixedArray::MakeReserveDArray(capacity));
// TODO (T29595301): Use id instead of StringData
// NewRecord: load the record type named `s` (error if unknown), then build
// a RecordData from the n literal-string field names and the top n stack
// cells, which are replaced by the new record.
OPTBLD_INLINE void iopNewRecord(const StringData* s, imm_array<int32_t> ids) {
  auto rec = Unit::loadRecord(s);
  if (!rec) {
    raise_error(Strings::UNKNOWN_RECORD, s->data());
  auto const n = ids.size;
  assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
  req::vector<const StringData*> names;
  names.reserve(n);
  auto const unit = vmfp()->m_func->unit();
  for (size_t i = 0; i < n; ++i) {
    auto name = unit->lookupLitstrId(ids[i]);
    names.push_back(name);
  // newRecord moves the stack values into the record.
  auto recdata =
    RecordData::newRecord(rec, names.size(), names.data(), vmStack().topC());
  vmStack().ndiscard(n);
  vmStack().pushRecordNoRc(recdata);
// AddElemC: $3[$2] = $1. The container at depth 2 must be an array or dict;
// the key and value are popped, leaving the updated container on the stack.
OPTBLD_INLINE void iopAddElemC() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  Cell* c3 = vmStack().indC(2);
  if (!isArrayType(c3->m_type) && !isDictType(c3->m_type)) {
    raise_error("AddElemC: $3 must be an array or dict");
  // Integer keys take the fast path; everything else goes through the
  // generic Variant key overload.
  if (c2->m_type == KindOfInt64) {
    cellAsVariant(*c3).asArrRef().set(c2->m_data.num, tvAsCVarRef(c1));
  } else {
    cellAsVariant(*c3).asArrRef().set(tvAsCVarRef(c2), tvAsCVarRef(c1));
  vmStack().popC();
  vmStack().popC();
// AddElemV: like AddElemC but binds the element by reference ($3[$2] =& $1).
// Only plain arrays are accepted; the ref and key are popped.
OPTBLD_INLINE void iopAddElemV() {
  Ref* r1 = vmStack().topV();
  Cell* c2 = vmStack().indC(1);
  Cell* c3 = vmStack().indC(2);
  if (!isArrayType(c3->m_type)) {
    raise_error("AddElemV: $3 must be an array");
  if (c2->m_type == KindOfInt64) {
    cellAsVariant(*c3).asArrRef().setRef(c2->m_data.num, tvAsVariant(r1));
  } else {
    cellAsVariant(*c3).asArrRef().setRef(tvAsCVarRef(c2), tvAsVariant(r1));
  vmStack().popV();
  vmStack().popC();
// AddNewElemC: $2[] = $1 for array/vec/keyset at depth 1; pops the value.
// For vec/keyset the append may produce a new ArrayData (copy-on-write /
// growth), in which case the old one is decref'd and the slot repointed.
OPTBLD_INLINE void iopAddNewElemC() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  if (isArrayType(c2->m_type)) {
    cellAsVariant(*c2).asArrRef().append(tvAsCVarRef(c1));
  } else if (isVecType(c2->m_type)) {
    auto in = c2->m_data.parr;
    auto out = PackedArray::AppendVec(in, *c1);
    if (in != out) decRefArr(in);
    c2->m_type = KindOfVec;
    c2->m_data.parr = out;
  } else if (isKeysetType(c2->m_type)) {
    auto in = c2->m_data.parr;
    auto out = SetArray::Append(in, *c1);
    if (in != out) decRefArr(in);
    c2->m_type = KindOfKeyset;
    c2->m_data.parr = out;
  } else {
    raise_error("AddNewElemC: $2 must be an array, vec, or keyset");
  assertx(cellIsPlausible(*c2));
  vmStack().popC();
// AddNewElemV: $2[] =& $1 — append the ref on top of the stack to the array
// at depth 1; only plain arrays are accepted. Pops the ref.
OPTBLD_INLINE void iopAddNewElemV() {
  Ref* r1 = vmStack().topV();
  Cell* c2 = vmStack().indC(1);
  if (!isArrayType(c2->m_type)) {
    raise_error("AddNewElemV: $2 must be an array");
  cellAsVariant(*c2).asArrRef().appendRef(tvAsVariant(r1));
  vmStack().popV();
// NewCol: push a new, empty collection of the given type. Pair is excluded
// here; pairs are created fully-populated via NewPair.
OPTBLD_INLINE void iopNewCol(CollectionType cType) {
  assertx(cType != CollectionType::Pair);
  // Incref the collection object during construction.
  auto obj = collections::alloc(cType);
  vmStack().pushObjectNoRc(obj);
// NewPair: replace the top two cells with a Pair containing them, with the
// deeper cell as the first element.
OPTBLD_INLINE void iopNewPair() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  // elements were pushed onto the stack in the order they should appear
  // in the pair, so the top of the stack should become the second element
  auto pair = collections::allocPair(*c2, *c1);
  // This constructor moves values, no inc/decref is necessary.
  vmStack().ndiscard(2);
  vmStack().pushObjectNoRc(pair);
// ColFromArray: convert the Hack array on top of the stack into a collection
// of the given type. (Imm)Vector requires a vec; (Imm)Set/(Imm)Map require
// a dict. Pair is not constructible this way.
OPTBLD_INLINE void iopColFromArray(CollectionType cType) {
  assertx(cType != CollectionType::Pair);
  auto const c1 = vmStack().topC();
  if (cType == CollectionType::Vector || cType == CollectionType::ImmVector) {
    if (UNLIKELY(!isVecType(c1->m_type))) {
      raise_error("ColFromArray: $1 must be a Vec when creating an "
                  "(Imm)Vector");
  } else if (UNLIKELY(!isDictType(c1->m_type))) {
    raise_error("ColFromArray: $1 must be a Dict when creating an (Imm)Set "
                "or an (Imm)Map");
  // This constructor reassociates the ArrayData with the collection, so no
  // inc/decref is needed for the array. The collection object itself is
  // increfed.
  auto obj = collections::alloc(cType, c1->m_data.parr);
  vmStack().discard();
  vmStack().pushObjectNoRc(obj);
// CnsE: push the value of global constant `s`; raise a fatal error if the
// constant is not defined (the "E" = error variant).
OPTBLD_INLINE void iopCnsE(const StringData* s) {
  auto const cns = Unit::loadCns(s);
  if (cns == nullptr) {
    raise_error("Undefined constant '%s'", s->data());
  auto const c1 = vmStack().allocC();
  cellDup(*cns, *c1);
2305 OPTBLD_INLINE void iopDefCns(const StringData* s) {
2306 bool result = Unit::defCns(s, vmStack().topTV());
2307 vmStack().replaceTV<KindOfBoolean>(result);
// ClsCns: push the class constant `clsCnsName` of the class held in the
// given class-ref slot; errors if the class has no such constant.
OPTBLD_INLINE void iopClsCns(const StringData* clsCnsName, clsref_slot slot) {
  auto const cls = slot.take().second;
  auto const clsCns = cls->clsCnsGet(clsCnsName);
  // clsCnsGet signals "not found" with an Uninit value.
  if (clsCns.m_type == KindOfUninit) {
    raise_error("Couldn't find constant %s::%s",
                cls->name()->data(), clsCnsName->data());
  cellDup(clsCns, *vmStack().allocTV());
// ClsCnsD: push the class constant `clsCnsName` of the class statically
// identified by `classId` (the "D" = direct/known-class variant).
OPTBLD_INLINE void iopClsCnsD(const StringData* clsCnsName, Id classId) {
  const NamedEntityPair& classNamedEntity =
    vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
  auto const clsCns = g_context->lookupClsCns(classNamedEntity.second,
                                              classNamedEntity.first, clsCnsName);
  auto const c1 = vmStack().allocC();
  cellDup(clsCns, *c1);
// Concat: replace the top two cells with their string concatenation
// ($2 . $1). The deeper operand is converted to string first, so any
// conversion side effects run in source order.
OPTBLD_FLT_INLINE void iopConcat() {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const s2 = cellAsVariant(*c2).toString();
  auto const s1 = cellAsCVarRef(*c1).toString();
  cellAsVariant(*c2) = concat(s2, s1);
  assertx(c2->m_data.pstr->checkCount());
  vmStack().popC();
2341 OPTBLD_INLINE void iopConcatN(uint32_t n) {
2342 auto const c1 = vmStack().topC();
2343 auto const c2 = vmStack().indC(1);
2345 if (n == 2) {
2346 auto const s2 = cellAsVariant(*c2).toString();
2347 auto const s1 = cellAsCVarRef(*c1).toString();
2348 cellAsVariant(*c2) = concat(s2, s1);
2349 assertx(c2->m_data.pstr->checkCount());
2350 } else if (n == 3) {
2351 auto const c3 = vmStack().indC(2);
2352 auto const s3 = cellAsVariant(*c3).toString();
2353 auto const s2 = cellAsCVarRef(*c2).toString();
2354 auto const s1 = cellAsCVarRef(*c1).toString();
2355 cellAsVariant(*c3) = concat3(s3, s2, s1);
2356 assertx(c3->m_data.pstr->checkCount());
2357 } else {
2358 assertx(n == 4);
2359 auto const c3 = vmStack().indC(2);
2360 auto const c4 = vmStack().indC(3);
2361 auto const s4 = cellAsVariant(*c4).toString();
2362 auto const s3 = cellAsCVarRef(*c3).toString();
2363 auto const s2 = cellAsCVarRef(*c2).toString();
2364 auto const s1 = cellAsCVarRef(*c1).toString();
2365 cellAsVariant(*c4) = concat4(s4, s3, s2, s1);
2366 assertx(c4->m_data.pstr->checkCount());
2369 for (int i = 1; i < n; ++i) {
2370 vmStack().popC();
2374 OPTBLD_INLINE void iopNot() {
2375 Cell* c1 = vmStack().topC();
2376 cellAsVariant(*c1) = !cellAsVariant(*c1).toBoolean();
// Apply binary op `fn` to the top two cells (deeper operand passed first),
// store the result into the deeper slot, and pop the top. The deeper slot's
// old value is decref'd only after `fn` produced the result, so `fn` may
// safely read both inputs.
template<class Fn>
OPTBLD_INLINE void implCellBinOp(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = result;
  vmStack().popC();
// Same as implCellBinOp, but `fn` yields a bool and the result slot is set
// to a KindOfBoolean cell.
template<class Fn>
OPTBLD_INLINE void implCellBinOpBool(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  bool const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfBoolean>(result);
  vmStack().popC();
// Same as implCellBinOp, but the result is stored as a KindOfInt64 cell.
template<class Fn>
OPTBLD_INLINE void implCellBinOpInt64(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfInt64>(result);
  vmStack().popC();
2409 OPTBLD_INLINE void iopAdd() {
2410 implCellBinOp(cellAdd);
2413 OPTBLD_INLINE void iopSub() {
2414 implCellBinOp(cellSub);
2417 OPTBLD_INLINE void iopMul() {
2418 implCellBinOp(cellMul);
2421 OPTBLD_INLINE void iopAddO() {
2422 implCellBinOp(cellAddO);
2425 OPTBLD_INLINE void iopSubO() {
2426 implCellBinOp(cellSubO);
2429 OPTBLD_INLINE void iopMulO() {
2430 implCellBinOp(cellMulO);
2433 OPTBLD_INLINE void iopDiv() {
2434 implCellBinOp(cellDiv);
2437 OPTBLD_INLINE void iopPow() {
2438 implCellBinOp(cellPow);
2441 OPTBLD_INLINE void iopMod() {
2442 implCellBinOp(cellMod);
2445 OPTBLD_INLINE void iopBitAnd() {
2446 implCellBinOp(cellBitAnd);
2449 OPTBLD_INLINE void iopBitOr() {
2450 implCellBinOp(cellBitOr);
2453 OPTBLD_INLINE void iopBitXor() {
2454 implCellBinOp(cellBitXor);
2457 OPTBLD_INLINE void iopXor() {
2458 implCellBinOpBool([&] (Cell c1, Cell c2) -> bool {
2459 return cellToBool(c1) ^ cellToBool(c2);
2463 OPTBLD_INLINE void iopSame() {
2464 implCellBinOpBool(cellSame);
2467 OPTBLD_INLINE void iopNSame() {
2468 implCellBinOpBool([&] (Cell c1, Cell c2) {
2469 return !cellSame(c1, c2);
2473 OPTBLD_INLINE void iopEq() {
2474 implCellBinOpBool([&] (Cell c1, Cell c2) {
2475 return cellEqual(c1, c2);
2479 OPTBLD_INLINE void iopNeq() {
2480 implCellBinOpBool([&] (Cell c1, Cell c2) {
2481 return !cellEqual(c1, c2);
2485 OPTBLD_INLINE void iopLt() {
2486 implCellBinOpBool([&] (Cell c1, Cell c2) {
2487 return cellLess(c1, c2);
2491 OPTBLD_INLINE void iopLte() {
2492 implCellBinOpBool(cellLessOrEqual);
2495 OPTBLD_INLINE void iopGt() {
2496 implCellBinOpBool([&] (Cell c1, Cell c2) {
2497 return cellGreater(c1, c2);
2501 OPTBLD_INLINE void iopGte() {
2502 implCellBinOpBool(cellGreaterOrEqual);
2505 OPTBLD_INLINE void iopCmp() {
2506 implCellBinOpInt64([&] (Cell c1, Cell c2) {
2507 return cellCompare(c1, c2);
2511 OPTBLD_INLINE void iopShl() {
2512 implCellBinOp(cellShl);
2515 OPTBLD_INLINE void iopShr() {
2516 implCellBinOp(cellShr);
2519 OPTBLD_INLINE void iopBitNot() {
2520 cellBitNot(*vmStack().topC());
2523 OPTBLD_INLINE void iopCastBool() {
2524 Cell* c1 = vmStack().topC();
2525 tvCastToBooleanInPlace(c1);
2528 OPTBLD_INLINE void iopCastInt() {
2529 Cell* c1 = vmStack().topC();
2530 tvCastToInt64InPlace(c1);
2533 OPTBLD_INLINE void iopCastDouble() {
2534 Cell* c1 = vmStack().topC();
2535 tvCastToDoubleInPlace(c1);
2538 OPTBLD_INLINE void iopCastString() {
2539 Cell* c1 = vmStack().topC();
2540 tvCastToStringInPlace(c1);
2543 OPTBLD_INLINE void iopCastArray() {
2544 Cell* c1 = vmStack().topC();
2545 tvCastToArrayInPlace(c1);
2548 OPTBLD_INLINE void iopCastObject() {
2549 Cell* c1 = vmStack().topC();
2550 tvCastToObjectInPlace(c1);
2553 OPTBLD_INLINE void iopCastDict() {
2554 Cell* c1 = vmStack().topC();
2555 tvCastToDictInPlace(c1);
2558 OPTBLD_INLINE void iopCastKeyset() {
2559 Cell* c1 = vmStack().topC();
2560 tvCastToKeysetInPlace(c1);
2563 OPTBLD_INLINE void iopCastVec() {
2564 Cell* c1 = vmStack().topC();
2565 tvCastToVecInPlace(c1);
2568 OPTBLD_INLINE void iopCastVArray() {
2569 assertx(!RuntimeOption::EvalHackArrDVArrs);
2570 Cell* c1 = vmStack().topC();
2571 tvCastToVArrayInPlace(c1);
2574 OPTBLD_INLINE void iopCastDArray() {
2575 assertx(!RuntimeOption::EvalHackArrDVArrs);
2576 Cell* c1 = vmStack().topC();
2577 tvCastToDArrayInPlace(c1);
// DblAsBits: reinterpret the bit pattern of the double on top of the stack
// as an int64 (type tag flip only, no data conversion). Non-doubles become
// int 0.
OPTBLD_INLINE void iopDblAsBits() {
  auto c = vmStack().topC();
  if (UNLIKELY(!isDoubleType(c->m_type))) {
    vmStack().replaceC<KindOfInt64>(0);
    return;
  c->m_type = KindOfInt64;
// instanceof against a class name: resolve the (non-normalized) name to a
// NamedEntity and test the cell against its cached class. Returns false if
// no class with that name is currently loaded.
ALWAYS_INLINE
bool implInstanceOfHelper(const StringData* str1, Cell* c2) {
  const NamedEntity* rhs = NamedEntity::get(str1, false);
  // Because of other codepaths, an un-normalized name might enter the
  // table without a Class* so we need to check if it's there.
  if (LIKELY(rhs && rhs->getCachedClass() != nullptr)) {
    return cellInstanceOf(c2, rhs);
  return false;
// InstanceOf: pop the class designator ($1: string name, object, or class
// pointer) and replace $2 with the boolean result of `$2 instanceof $1`.
OPTBLD_INLINE void iopInstanceOf() {
  Cell* c1 = vmStack().topC(); // c2 instanceof c1
  Cell* c2 = vmStack().indC(1);
  bool r = false;
  if (isStringType(c1->m_type)) {
    r = implInstanceOfHelper(c1->m_data.pstr, c2);
  } else if (c1->m_type == KindOfObject) {
    // Object RHS: compare against that object's class.
    if (c2->m_type == KindOfObject) {
      ObjectData* lhs = c2->m_data.pobj;
      ObjectData* rhs = c1->m_data.pobj;
      r = lhs->instanceof(rhs->getVMClass());
  } else if (isClassType(c1->m_type)) {
    // TODO (T29639296) Exploit class pointer further
    r = implInstanceOfHelper(c1->m_data.pclass->name(), c2);
  } else {
    raise_error("Class name must be a valid object or a string");
  vmStack().popC();
  vmStack().replaceC<KindOfBoolean>(r);
2622 OPTBLD_INLINE void iopInstanceOfD(Id id) {
2623 const NamedEntity* ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
2624 Cell* c1 = vmStack().topC();
2625 bool r = cellInstanceOf(c1, ne);
2626 vmStack().replaceC<KindOfBoolean>(r);
// IsLateBoundCls: replace the top cell with whether it is an instance of the
// current late-bound (static) class. Errors outside class scope and inside
// traits.
OPTBLD_INLINE void iopIsLateBoundCls() {
  auto const cls = frameStaticClass(vmfp());
  if (!cls) {
    raise_error(HPHP::Strings::THIS_OUTSIDE_CLASS);
  if (isTrait(cls)) {
    raise_error("\"is\" and \"as\" operators cannot be used with a trait");
  auto const c1 = vmStack().topC();
  bool r = cellInstanceOf(c1, cls);
  vmStack().replaceC<KindOfBoolean>(r);
2642 namespace {
// Resolve (and verify) the n type structures starting at `values`. The
// declaring/called class context is looked up only when the first type
// structure may contain non-static parts (per typeStructureCouldBeNonStatic).
ArrayData* resolveAndVerifyTypeStructureHelper(
  uint32_t n, const TypedValue* values, bool suppress, bool isOrAsOp) {
  Class* declaringCls = nullptr;
  Class* calledCls = nullptr;
  auto const v = *values;
  isValidTSType(v, true);
  if (typeStructureCouldBeNonStatic(ArrNR(v.m_data.parr))) {
    auto const frame = vmfp();
    if (frame && frame->func()) {
      declaringCls = frame->func()->cls();
      if (declaringCls) {
        // Late-bound class: the frame's class if present, else $this's class.
        calledCls = frame->hasClass()
          ? frame->getClass()
          : frame->getThis()->getVMClass();
  return jit::resolveTypeStructHelper(n, values, declaringCls,
                                      calledCls, suppress, isOrAsOp);
// For is/as expressions: either resolve the type structure on top of the
// stack (Resolve op) or validate it as-is and return the raw array. Does not
// pop the stack.
ALWAYS_INLINE ArrayData* maybeResolveAndErrorOnTypeStructure(
  TypeStructResolveOp op,
  bool suppress
  auto const a = vmStack().topC();
  isValidTSType(*a, true);
  if (op == TypeStructResolveOp::Resolve) {
    return resolveAndVerifyTypeStructureHelper(1, vmStack().topC(),
                                               suppress, true);
  errorOnIsAsExpressionInvalidTypes(ArrNR(a->m_data.parr));
  return a->m_data.parr;
2680 } // namespace
// IsTypeStructC: pop the type structure on top of the stack and replace the
// cell below it with whether that cell matches the (possibly resolved) TS.
// Mismatches are suppressed (is-expression semantics).
OPTBLD_INLINE void iopIsTypeStructC(TypeStructResolveOp op) {
  auto const c = vmStack().indC(1);
  auto const ts = maybeResolveAndErrorOnTypeStructure(op, true);
  auto b = checkTypeStructureMatchesCell(ArrNR(ts), *c);
  vmStack().popC(); // pop ts
  vmStack().replaceC<KindOfBoolean>(b);
// AsTypeStructC: like IsTypeStructC, but on mismatch throws a
// TypeStructureDoesNotMatch exception describing the failing key and the
// given/expected types (as-expression semantics). On success the checked
// value is left on the stack.
OPTBLD_INLINE void iopAsTypeStructC(TypeStructResolveOp op) {
  auto const c = vmStack().indC(1);
  auto const ts = maybeResolveAndErrorOnTypeStructure(op, false);
  std::string givenType, expectedType, errorKey;
  if (!checkTypeStructureMatchesCell(
        ArrNR(ts), *c, givenType, expectedType, errorKey)) {
    throwTypeStructureDoesNotMatchCellException(
      givenType, expectedType, errorKey);
  vmStack().popC(); // pop ts
// CombineAndResolveTypeStruct: combine the top n type structures into one
// resolved TS, pop all n inputs, and push the result (dict or array
// depending on the HackArrDVArrs mode).
OPTBLD_INLINE void iopCombineAndResolveTypeStruct(uint32_t n) {
  assertx(n != 0);
  auto const resolved =
    resolveAndVerifyTypeStructureHelper(n, vmStack().topC(), false, false);
  vmStack().popC(); // pop the first TS
  vmStack().ndiscard(n-1);
  if (RuntimeOption::EvalHackArrDVArrs) {
    vmStack().pushDict(resolved);
  } else {
    vmStack().pushArray(resolved);
// RecordReifiedGeneric: record the top n reified generics and replace them
// with the resulting static type-structure list (vec or array depending on
// the HackArrDVArrs mode).
OPTBLD_INLINE void iopRecordReifiedGeneric(uint32_t n) {
  assertx(n != 0);
  auto const tsList =
    jit::recordReifiedGenericsAndGetTSList(n, vmStack().topC());
  vmStack().ndiscard(n);
  if (RuntimeOption::EvalHackArrDVArrs) {
    vmStack().pushStaticVec(tsList);
  } else {
    vmStack().pushStaticArray(tsList);
// ReifiedName: record the top n reified generics, mangle `name` with them,
// pop the n inputs and push the mangled name as a static string.
OPTBLD_INLINE void iopReifiedName(uint32_t n, const StringData* name) {
  assertx(n != 0);
  auto const result = jit::recordReifiedGenericsAndGetName(n, vmStack().topC());
  auto const mangledName = mangleReifiedName(name, result);
  vmStack().ndiscard(n);
  vmStack().pushStaticString(mangledName);
// CheckReifiedGenericMismatch: verify that the reified-generics list on top
// of the stack matches the current class's expectations, then pop it.
// Errors when no class scope is active.
OPTBLD_INLINE void iopCheckReifiedGenericMismatch() {
  Class* cls = arGetContextClass(vmfp());
  if (!cls) raise_error("No class scope is active");
  auto const c = vmStack().topC();
  assertx(tvIsVecOrVArray(c));
  checkClassReifiedGenericMismatch(cls, c->m_data.parr);
  vmStack().popC();
2744 OPTBLD_INLINE void iopPrint() {
2745 Cell* c1 = vmStack().topC();
2746 g_context->write(cellAsVariant(*c1).toString());
2747 vmStack().replaceC<KindOfInt64>(1);
// Clone: replace the object on top of the stack with a clone of it; errors
// on non-objects. The clone is made first; the original is then released
// via popTV() and the (re-pushed) slot is overwritten with the new object.
OPTBLD_INLINE void iopClone() {
  TypedValue* tv = vmStack().topTV();
  if (tv->m_type != KindOfObject) {
    raise_error("clone called on non-object");
  ObjectData* obj = tv->m_data.pobj;
  const Class* class_ UNUSED = obj->getVMClass();
  ObjectData* newobj = obj->clone();
  vmStack().popTV();
  vmStack().pushNull();
  // tv still points at the top slot; install the clone over the null.
  tv->m_type = KindOfObject;
  tv->m_data.pobj = newobj;
// Exit: implement PHP exit()/die(). An integer argument becomes the process
// exit code; any other value is converted to string and written to output.
// The argument is replaced with null before unwinding via ExitException.
OPTBLD_INLINE void iopExit() {
  int exitCode = 0;
  Cell* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64) {
    exitCode = c1->m_data.num;
  } else {
    g_context->write(cellAsVariant(*c1).toString());
  vmStack().popC();
  vmStack().pushNull();
  throw ExitException(exitCode);
// Fatal: raise a fatal error with the message on top of the stack (popped
// first so the unwinder doesn't see it). RuntimeOmitFrame skips the current
// frame in the reported backtrace.
OPTBLD_INLINE void iopFatal(FatalOp kind_char) {
  TypedValue* top = vmStack().topTV();
  std::string msg;
  if (isStringType(top->m_type)) {
    msg = top->m_data.pstr->data();
  } else {
    msg = "Fatal error message not a string";
  vmStack().popTV();
  switch (kind_char) {
  case FatalOp::RuntimeOmitFrame:
    raise_error_without_first_frame(msg);
    break;
  case FatalOp::Runtime:
  case FatalOp::Parse:
    raise_error(msg);
    break;
// Check surprise flags on backward jumps (offset <= 0) so long-running
// loops service pending request surprises.
OPTBLD_INLINE void jmpSurpriseCheck(Offset offset) {
  if (offset <= 0 && UNLIKELY(checkSurpriseFlags())) {
    auto const flags = handle_request_surprise();
    // Memory Threshold callback should also be fired here
    if (flags & MemThresholdFlag) {
      EventHook::DoMemoryThresholdCallback();
2809 OPTBLD_INLINE void iopJmp(PC& pc, PC targetpc) {
2810 jmpSurpriseCheck(targetpc - pc);
2811 pc = targetpc;
2814 OPTBLD_INLINE void iopJmpNS(PC& pc, PC targetpc) {
2815 pc = targetpc;
// Shared implementation of JmpZ/JmpNZ: pop the top cell, convert it to
// bool, and branch to targetpc when it is falsy (JmpZ) or truthy (JmpNZ).
// Int/bool payloads take the fast path without a Variant conversion.
template<Op op>
OPTBLD_INLINE void jmpOpImpl(PC& pc, PC targetpc) {
  static_assert(op == OpJmpZ || op == OpJmpNZ,
                "jmpOpImpl should only be used by JmpZ and JmpNZ");
  jmpSurpriseCheck(targetpc - pc);
  Cell* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
    int64_t n = c1->m_data.num;
    vmStack().popX();
    if (op == OpJmpZ ? n == 0 : n != 0) pc = targetpc;
  } else {
    auto const cond = cellAsCVarRef(*c1).toBoolean();
    vmStack().popC();
    if (op == OpJmpZ ? !cond : cond) pc = targetpc;
2836 OPTBLD_INLINE void iopJmpZ(PC& pc, PC targetpc) {
2837 jmpOpImpl<OpJmpZ>(pc, targetpc);
2840 OPTBLD_INLINE void iopJmpNZ(PC& pc, PC targetpc) {
2841 jmpOpImpl<OpJmpNZ>(pc, targetpc);
// Select: pop a boolean condition from the top of the stack, then keep the
// "true" value (on top of the remaining two) or the "false" value beneath
// it, popping the other.
OPTBLD_INLINE void iopSelect() {
  auto const cond = [&]{
    auto c = vmStack().topC();
    if (c->m_type == KindOfInt64 || c->m_type == KindOfBoolean) {
      auto const val = (bool)c->m_data.num;
      vmStack().popX();
      return val;
    } else {
      auto const val = cellAsCVarRef(*c).toBoolean();
      vmStack().popC();
      return val;
  }();
  if (cond) {
    // Keep the top value: move it into the slot below.
    auto const t = *vmStack().topC();
    vmStack().discard();
    vmStack().replaceC(t);
  } else {
    vmStack().popC();
// IterBreak: free every iterator listed in the iter table (both Iter and
// LIter kinds), then jump to targetpc. Used when breaking out of nested
// foreach loops.
OPTBLD_INLINE
void iopIterBreak(PC& pc, PC targetpc, const IterTable& iterTab) {
  for (auto const& ent : iterTab) {
    auto iter = frame_iter(vmfp(), ent.id);
    switch (ent.kind) {
      case KindOfIter:  iter->free();  break;
      case KindOfLIter: iter->free();  break;
  pc = targetpc;
2879 enum class SwitchMatch {
2880 NORMAL, // value was converted to an int: match normally
2881 NONZERO, // can't be converted to an int: match first nonzero case
2882 DEFAULT, // can't be converted to an int: match default case
// Map a double switch operand to an int64 case value. Doubles with an exact
// integer representation match normally; everything else falls to the
// default case.
static SwitchMatch doubleCheck(double d, int64_t& out) {
  if (int64_t(d) == d) {
    out = d;
    return SwitchMatch::NORMAL;
  return SwitchMatch::DEFAULT;
// Switch: pop the operand and jump through the offset table. Unbounded
// switches (generator resume points) require an in-range int and do no
// bounds check. Bounded switches coerce the operand to an integer using
// PHP loose-comparison rules; values that cannot map to an int take the
// "first nonzero" or "default" slots at the end of the table (see
// SwitchMatch). Refcounted operands are released here, before the jump.
OPTBLD_INLINE
void iopSwitch(PC origpc, PC& pc, SwitchKind kind, int64_t base,
               imm_array<Offset> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 0);
  TypedValue* val = vmStack().topTV();
  if (kind == SwitchKind::Unbounded) {
    assertx(val->m_type == KindOfInt64);
    // Continuation switch: no bounds checking needed
    int64_t label = val->m_data.num;
    vmStack().popX();
    assertx(label >= 0 && label < veclen);
    pc = origpc + jmptab[label];
  } else {
    // Generic integer switch
    int64_t intval;
    SwitchMatch match = SwitchMatch::NORMAL;
    [&] {
      switch (val->m_type) {
        case KindOfUninit:
        case KindOfNull:
          intval = 0;
          return;
        case KindOfBoolean:
          // bool(true) is equal to any non-zero int, bool(false) == 0
          if (val->m_data.num) {
            match = SwitchMatch::NONZERO;
          } else {
            intval = 0;
          return;
        case KindOfInt64:
          intval = val->m_data.num;
          return;
        case KindOfDouble:
          match = doubleCheck(val->m_data.dbl, intval);
          return;
        case KindOfFunc:
        case KindOfClass:
        case KindOfPersistentString:
        case KindOfString: {
          double dval = 0.0;
          // Funcs/classes compare via their name strings.
          auto const str =
            isFuncType(val->m_type) ? funcToStringHelper(val->m_data.pfunc) :
            isClassType(val->m_type) ? classToStringHelper(val->m_data.pclass) :
            val->m_data.pstr;
          DataType t = str->isNumericWithVal(intval, dval, 1);
          switch (t) {
            case KindOfNull:
              intval = 0;
              break;
            case KindOfInt64:
              // do nothing
              break;
            case KindOfDouble:
              match = doubleCheck(dval, intval);
              break;
            case KindOfUninit:
            case KindOfBoolean:
            case KindOfPersistentString:
            case KindOfString:
            case KindOfPersistentVec:
            case KindOfVec:
            case KindOfPersistentDict:
            case KindOfDict:
            case KindOfPersistentKeyset:
            case KindOfKeyset:
            case KindOfPersistentShape:
            case KindOfShape:
            case KindOfPersistentArray:
            case KindOfArray:
            case KindOfObject:
            case KindOfResource:
            case KindOfRef:
            case KindOfFunc:
            case KindOfClass:
            case KindOfClsMeth:
            case KindOfRecord:
              not_reached();
          if (val->m_type == KindOfString) tvDecRefStr(val);
          return;
        // Refcounted array-likes decref, then fall through to their
        // persistent twins, which all take the default case.
        case KindOfVec:
          tvDecRefArr(val);
        case KindOfPersistentVec:
          match = SwitchMatch::DEFAULT;
          return;
        case KindOfDict:
          tvDecRefArr(val);
        case KindOfPersistentDict:
          match = SwitchMatch::DEFAULT;
          return;
        case KindOfKeyset:
          tvDecRefArr(val);
        case KindOfPersistentKeyset:
          match = SwitchMatch::DEFAULT;
          return;
        case KindOfShape:
          tvDecRefArr(val);
        case KindOfPersistentShape:
          match = SwitchMatch::DEFAULT;
          return;
        case KindOfArray:
          tvDecRefArr(val);
        case KindOfPersistentArray:
          match = SwitchMatch::DEFAULT;
          return;
        case KindOfClsMeth:
          tvDecRefClsMeth(val);
          match = SwitchMatch::DEFAULT;
          break;
        case KindOfObject:
          intval = val->m_data.pobj->toInt64();
          tvDecRefObj(val);
          return;
        case KindOfResource:
          intval = val->m_data.pres->data()->o_toInt64();
          tvDecRefRes(val);
          return;
        case KindOfRecord: // TODO (T41029094)
          raise_error(Strings::RECORD_NOT_SUPPORTED);
        case KindOfRef:
          break;
      not_reached();
    }();
    vmStack().discard();
    // The last two table slots are the "first nonzero" and "default"
    // targets; in-range ints index directly.
    if (match != SwitchMatch::NORMAL ||
        intval < base || intval >= (base + veclen - 2)) {
      switch (match) {
        case SwitchMatch::NORMAL:
        case SwitchMatch::DEFAULT:
          pc = origpc + jmptab[veclen - 1];
          break;
        case SwitchMatch::NONZERO:
          pc = origpc + jmptab[veclen - 2];
          break;
    } else {
      pc = origpc + jmptab[intval - base];
// SSwitch: string switch. Compare the popped operand (loosely, via
// cellEqual) against each case's literal string in order; the last table
// entry is the unconditional default.
OPTBLD_INLINE
void iopSSwitch(PC origpc, PC& pc, imm_array<StrVecItem> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 1);
  unsigned cases = veclen - 1; // the last vector item is the default case
  Cell* val = tvToCell(vmStack().topTV());
  Unit* u = vmfp()->m_func->unit();
  unsigned i;
  for (i = 0; i < cases; ++i) {
    auto item = jmptab[i];
    const StringData* str = u->lookupLitstrId(item.str);
    if (cellEqual(*val, str)) {
      pc = origpc + item.dest;
      vmStack().popC();
      return;
  // default case
  pc = origpc + jmptab[veclen - 1].dest;
  vmStack().popC();
3078 * jitReturnPre and jitReturnPost are used by RetC/V, CreateCont, NativeImpl,
3079 * Yield, and YieldK to perform a few tasks related to interpreting out of a
3080 * frame:
3082 * - If the current frame was entered in the TC and the jit is now off, we
3083 * throw a VMSwitchMode at the beginning of the bytecode to execute the
3084 * call's catch block (if present) before performing the return.
3085 * - If the current frame was entered in the TC and the jit is still on,
3086 * we wait until the end of the bytecode and throw a VMResumeTC, to return to
3087 * our translated caller rather than interpreting back into it.
3088 * - If the current frame was entered by the interpreter but was active when
3089 * the jit called MCGenerator::handleResume() (meaning it's the saved value
3090 * of %rbp in handleResume()'s stack frame), throw a VMResumeTC to reenter
3091 * handleResume(). This is necessary to update the value of %rbp in the TC
3092 * frame, so the unwinder doesn't read from a dead VM frame if something
3093 * throws from the interpreter later on.
3095 namespace {
// Caller info captured by jitReturnPre before a frame is torn down, so
// jitReturnPost can decide how to resume (interpreter vs TC).
struct JitReturn {
  uint64_t savedRip;  // caller's return address, or 0 if not TC-entered
  ActRec* fp;         // the frame being returned from
  ActRec* sfp;        // its caller frame
  uint32_t callOff;   // call offset into the caller's function
// Capture return info for `fp` before tearing it down. savedRip is zeroed
// when the frame was not entered from the TC (so jitReturnPost knows the
// interpreter may return normally); throws VMSwitchMode if the jit was
// turned off after this frame was entered in the TC.
OPTBLD_INLINE JitReturn jitReturnPre(ActRec* fp) {
  auto savedRip = fp->m_savedRip;
  if (isReturnHelper(reinterpret_cast<void*>(savedRip))) {
    // This frame wasn't called from the TC, so it's ok to return using the
    // interpreter. callToExit is special: it's a return helper but we don't
    // treat it like one in here in order to simplify some things higher up in
    // the pipeline.
    if (reinterpret_cast<TCA>(savedRip) != jit::tc::ustubs().callToExit) {
      savedRip = 0;
  } else if (!RID().getJit()) {
    // We entered this frame in the TC but the jit is now disabled, probably
    // because a debugger is attached. If we leave this frame in the
    // interpreter, we might be skipping a catch block that our caller expects
    // to be run. Switch to the interpreter before even beginning the
    // instruction.
    throw VMSwitchMode();
  return {savedRip, fp, fp->sfp(), fp->m_callOff};
// Decide where execution continues after returning from retInfo.fp: a TC
// address to resume at, or nullptr to keep interpreting. See the comment
// block above jitReturnPre for the full rationale.
OPTBLD_INLINE TCA jitReturnPost(JitReturn retInfo) {
  if (retInfo.savedRip) {
    if (isDebuggerReturnHelper(reinterpret_cast<void*>(retInfo.savedRip))) {
      // Our return address was smashed by the debugger. Do the work of the
      // debuggerRetHelper by setting some unwinder RDS info and resuming at
      // the appropriate catch trace.
      assertx(jit::g_unwind_rds.isInit());
      jit::g_unwind_rds->debuggerReturnSP = vmsp();
      jit::g_unwind_rds->debuggerCallOff = retInfo.callOff;
      return jit::unstashDebuggerCatch(retInfo.fp);
    // This frame was called by translated code so we can't interpret out of
    // it. Resume in the TC right after our caller. This situation most
    // commonly happens when we interpOne a RetC due to having a VarEnv or some
    // other weird case.
    return TCA(retInfo.savedRip);
  if (!retInfo.sfp) {
    // If we don't have an sfp, we're returning from the first frame in this VM
    // nesting level. The vmJitCalledFrame() check below is only important if
    // we might throw before returning to the TC, which is guaranteed to not
    // happen in this situation.
    assertx(vmfp() == nullptr);
    return nullptr;
  // Consider a situation with a PHP function f() that calls another function
  // g(). If the call is interpreted, then we spend some time in the TC inside
  // g(), then eventually end in dispatchBB() (called by
  // MCGenerator::handleResume()) for g()'s RetC, the logic here kicks in.
  // g()'s VM frame was in %rbp when the TC called handleResume(), so it's
  // saved somewhere in handleResume()'s stack frame. If we return out of that
  // frame and keep going in the interpreter, that saved %rbp will be pointing
  // to a garbage VM frame. This is a problem if something needs to throw an
  // exception up through handleResume() and the TC frames above it, since the
  // C++ unwinder will attempt to treat parts of the popped VM frame as
  // pointers and segfault.
  // To avoid running with this dangling saved %rbp a few frames up, we
  // immediately throw an exception that is "caught" by the TC frame that
  // called handleResume(). We resume execution in the TC which reloads the new
  // vmfp() into %rbp, then handleResume() is called again, this time with a
  // live VM frame in %rbp.
  if (vmJitCalledFrame() == retInfo.fp) {
    FTRACE(1, "Returning from frame {}; resuming", vmJitCalledFrame());
    return jit::tc::ustubs().resumeHelper;
  return nullptr;
3180 OPTBLD_INLINE void returnToCaller(PC& pc, ActRec* sfp, Offset callOff) {
3181 vmfp() = sfp;
3182 pc = LIKELY(sfp != nullptr)
3183 ? skipCall(sfp->func()->getEntry() + callOff)
3184 : nullptr;
3189 template <bool suspended>
3190 OPTBLD_INLINE TCA ret(PC& pc) {
3191 assertx(!suspended || vmfp()->func()->isAsyncFunction());
3192 assertx(!suspended || !vmfp()->resumed());
3194 auto const jitReturn = jitReturnPre(vmfp());
3196 // Get the return value.
3197 TypedValue retval = *vmStack().topTV();
3198 vmStack().discard();
3200 assertx(
3201 !suspended || (tvIsObject(retval) && retval.m_data.pobj->isWaitHandle())
3204 // Free $this and local variables. Calls FunctionReturn hook. The return
3205 // value must be removed from the stack, or the unwinder would try to free it
3206 // if the hook throws---but the event hook routine decrefs the return value
3207 // in that case if necessary.
3208 frame_free_locals_inl(vmfp(), vmfp()->func()->numLocals(), &retval);
3210 if (isProfileRequest()) {
3211 profileIncrementFuncCounter(vmfp()->func());
3214 // Grab caller info from ActRec.
3215 ActRec* sfp = vmfp()->sfp();
3216 Offset callOff = vmfp()->m_callOff;
3218 if (LIKELY(!vmfp()->resumed())) {
3219 // If in an eagerly executed async function, wrap the return value into
3220 // succeeded StaticWaitHandle. Async eager return requests are currently
3221 // not respected, as we don't have a way to obtain the async eager offset.
3222 if (UNLIKELY(vmfp()->func()->isAsyncFunction()) && !suspended) {
3223 auto const& retvalCell = *tvAssertCell(&retval);
3224 // Heads up that we're assuming CreateSucceeded can't throw, or we won't
3225 // decref the return value. (It can't right now.)
3226 auto const waitHandle = c_StaticWaitHandle::CreateSucceeded(retvalCell);
3227 cellCopy(make_tv<KindOfObject>(waitHandle), retval);
3230 // Free ActRec and store the return value.
3231 vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
3232 vmStack().ret();
3233 *vmStack().topTV() = retval;
3234 assertx(vmStack().topTV() == vmfp()->retSlot());
3235 // In case async eager return was requested by the caller, pretend that
3236 // we did not finish eagerly as we already boxed the value.
3237 vmStack().topTV()->m_aux.u_asyncNonEagerReturnFlag = -1;
3238 } else if (vmfp()->func()->isAsyncFunction()) {
3239 // Mark the async function as succeeded and store the return value.
3240 assertx(!sfp);
3241 auto wh = frame_afwh(vmfp());
3242 wh->ret(retval);
3243 decRefObj(wh);
3244 } else if (vmfp()->func()->isAsyncGenerator()) {
3245 // Mark the async generator as finished.
3246 assertx(isNullType(retval.m_type));
3247 auto const gen = frame_async_generator(vmfp());
3248 auto const eagerResult = gen->ret();
3249 if (eagerResult) {
3250 // Eager execution => return StaticWaitHandle.
3251 assertx(sfp);
3252 vmStack().pushObjectNoRc(eagerResult);
3253 } else {
3254 // Resumed execution => return control to the scheduler.
3255 assertx(!sfp);
3257 } else if (vmfp()->func()->isNonAsyncGenerator()) {
3258 // Mark the generator as finished and store the return value.
3259 frame_generator(vmfp())->ret(retval);
3261 // Push return value of next()/send()/raise().
3262 vmStack().pushNull();
3263 } else {
3264 not_reached();
3267 // Return control to the caller.
3268 returnToCaller(pc, sfp, callOff);
3270 return jitReturnPost(jitReturn);
3273 OPTBLD_INLINE TCA iopRetC(PC& pc) {
3274 return ret<false>(pc);
3277 OPTBLD_INLINE TCA iopRetCSuspended(PC& pc) {
3278 assertx(vmfp()->func()->isAsyncFunction());
3279 assertx(!vmfp()->resumed());
3280 return ret<true>(pc);
3283 OPTBLD_INLINE TCA iopRetM(PC& pc, uint32_t numRet) {
3284 auto const jitReturn = jitReturnPre(vmfp());
3286 req::vector<TypedValue> retvals;
3287 retvals.reserve(numRet);
3289 for (int i = numRet - 1; i >= 0; i--) {
3290 retvals.push_back(*vmStack().indC(i));
3293 vmStack().ndiscard(numRet);
3295 // Free $this and local variables. Calls FunctionReturn hook. The return
3296 // value must be removed from the stack, or the unwinder would try to free it
3297 // if the hook throws---but the event hook routine decrefs the return value
3298 // in that case if necessary.
3299 frame_free_locals_inl(vmfp(), vmfp()->func()->numLocals(), &retvals[0]);
3301 assertx(!vmfp()->func()->isGenerator() && !vmfp()->func()->isAsync());
3303 if (isProfileRequest()) {
3304 profileIncrementFuncCounter(vmfp()->func());
3307 // Grab caller info from ActRec.
3308 ActRec* sfp = vmfp()->sfp();
3309 Offset callOff = vmfp()->m_callOff;
3311 // Free ActRec and store the return value.
3312 vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
3313 vmStack().ret();
3315 // Discard scratch space for return values allocated for multi return FCall
3316 vmStack().ndiscard(numRet - 1);
3317 *vmStack().topTV() = retvals[1];
3319 for (int i = 2; i < numRet; i++) {
3320 *vmStack().allocTV() = retvals[i];
3323 // Store the actual return value at the top of the stack
3324 *vmStack().allocTV() = retvals[0];
3326 // Return control to the caller.
3327 returnToCaller(pc, sfp, callOff);
3329 return jitReturnPost(jitReturn);
3332 OPTBLD_INLINE void iopUnwind() {
3333 assertx(!g_context->m_faults.empty());
3334 assertx(g_context->m_faults.back().m_raiseOffset != kInvalidOffset);
3335 throw VMPrepareUnwind();
3338 OPTBLD_INLINE void iopThrow(PC&) {
3339 Cell* c1 = vmStack().topC();
3340 if (c1->m_type != KindOfObject ||
3341 !c1->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
3342 raise_error("Exceptions must implement the Throwable interface.");
3344 auto obj = Object::attach(c1->m_data.pobj);
3345 vmStack().discard();
3346 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
3347 throw req::root<Object>(std::move(obj));
3350 OPTBLD_INLINE void iopClsRefGetC(clsref_slot slot) {
3351 auto const cell = vmStack().topC();
3352 if (isStringType(cell->m_type)) {
3353 raise_str_to_class_notice(cell->m_data.pstr);
3355 auto const cls = lookupClsRef(cell);
3356 ArrayData* reified_types = getReifiedGenericsOpt(*cell);
3357 slot.put(reified_types, cls);
3358 vmStack().popC();
3361 OPTBLD_INLINE void iopClsRefGetTS(clsref_slot slot) {
3362 auto const cell = vmStack().topC();
3363 if (!tvIsDictOrDArray(cell)) {
3364 raise_error("Reified type must be a type structure");
3366 auto const ts = cell->m_data.parr;
3367 auto const classname_field = ts->rval(s_classname.get());
3368 if (!classname_field.is_set()) {
3369 raise_error("You cannot create a new instance of this type as "
3370 "it is not a class");
3372 assertx(isStringType(classname_field.type()));
3373 auto const name = classname_field.val().pstr;
3374 auto const generics_field = ts->rval(s_generic_types.get());
3375 auto mangledName = name;
3376 ArrayData* reified_types = nullptr;
3377 if (generics_field.is_set()) {
3378 reified_types = generics_field.val().parr;
3379 auto const mangledTypeName =
3380 makeStaticString(mangleReifiedGenericsName(reified_types));
3381 addToReifiedGenericsTable(mangledTypeName, reified_types);
3382 mangledName = mangleReifiedName(name, mangledTypeName);
3384 auto tv = make_tv<KindOfString>(mangledName);
3385 auto const cls = lookupClsRef(&tv);
3386 slot.put(reified_types, cls);
3387 vmStack().popC();
3390 static void raise_undefined_local(ActRec* fp, Id pind) {
3391 assertx(pind < fp->m_func->numNamedLocals());
3392 raise_notice(Strings::UNDEFINED_VARIABLE,
3393 fp->m_func->localVarName(pind)->data());
3396 static inline void cgetl_inner_body(TypedValue* fr, TypedValue* to) {
3397 assertx(fr->m_type != KindOfUninit);
3398 cellDup(*tvToCell(fr), *to);
3401 OPTBLD_INLINE void cgetl_body(ActRec* fp,
3402 TypedValue* fr,
3403 TypedValue* to,
3404 Id pind,
3405 bool warn) {
3406 if (fr->m_type == KindOfUninit) {
3407 // `to' is uninitialized here, so we need to tvWriteNull before
3408 // possibly causing stack unwinding.
3409 tvWriteNull(*to);
3410 if (warn) raise_undefined_local(fp, pind);
3411 } else {
3412 cgetl_inner_body(fr, to);
3416 OPTBLD_FLT_INLINE void iopCGetL(local_var fr) {
3417 Cell* to = vmStack().allocC();
3418 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3421 OPTBLD_INLINE void iopCGetQuietL(local_var fr) {
3422 Cell* to = vmStack().allocC();
3423 cgetl_body(vmfp(), fr.ptr, to, fr.index, false);
3426 OPTBLD_INLINE void iopCUGetL(local_var fr) {
3427 auto to = vmStack().allocTV();
3428 tvDup(*tvToCell(fr.ptr), *to);
3431 OPTBLD_INLINE void iopCGetL2(local_var fr) {
3432 TypedValue* oldTop = vmStack().topTV();
3433 TypedValue* newTop = vmStack().allocTV();
3434 memcpy(newTop, oldTop, sizeof *newTop);
3435 Cell* to = oldTop;
3436 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3439 OPTBLD_INLINE void iopPushL(local_var locVal) {
3440 assertx(locVal->m_type != KindOfUninit);
3441 assertx(!isRefType(locVal->m_type));
3442 TypedValue* dest = vmStack().allocTV();
3443 *dest = *locVal;
3444 locVal->m_type = KindOfUninit;
3447 OPTBLD_INLINE void cgetg_body(bool warn) {
3448 StringData* name;
3449 TypedValue* to = vmStack().topTV();
3450 TypedValue* fr = nullptr;
3451 lookup_gbl(vmfp(), name, to, fr);
3452 SCOPE_EXIT { decRefStr(name); };
3453 if (fr == nullptr) {
3454 if (warn && MoreWarnings) {
3455 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3457 tvDecRefGen(to);
3458 tvWriteNull(*to);
3459 } else if (fr->m_type == KindOfUninit) {
3460 if (warn) raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3461 tvDecRefGen(to);
3462 tvWriteNull(*to);
3463 } else {
3464 tvDecRefGen(to);
3465 cgetl_inner_body(fr, to);
3469 OPTBLD_INLINE void iopCGetG() { cgetg_body(true); }
3470 OPTBLD_INLINE void iopCGetQuietG() { cgetg_body(false); }
3472 struct SpropState {
3473 SpropState(Stack&, clsref_slot slot, bool ignoreLateInit);
3474 ~SpropState();
3475 StringData* name;
3476 Class* cls;
3477 TypedValue* output;
3478 TypedValue* val;
3479 TypedValue oldNameCell;
3480 Slot slot;
3481 bool visible;
3482 bool accessible;
3485 SpropState::SpropState(Stack& vmstack, clsref_slot cslot, bool ignoreLateInit) {
3486 cls = cslot.take().second;
3487 auto nameCell = output = vmstack.topTV();
3488 lookup_sprop(vmfp(), cls, name, nameCell, val,
3489 slot, visible, accessible, ignoreLateInit);
3490 oldNameCell = *nameCell;
3493 SpropState::~SpropState() {
3494 decRefStr(name);
3495 tvDecRefGen(oldNameCell);
3498 template<bool box> void getS(clsref_slot slot) {
3499 SpropState ss(vmStack(), slot, false);
3500 if (!(ss.visible && ss.accessible)) {
3501 raise_error("Invalid static property access: %s::%s",
3502 ss.cls->name()->data(),
3503 ss.name->data());
3505 if (box) {
3506 if (RuntimeOption::EvalCheckPropTypeHints > 0) {
3507 auto const& sprop = ss.cls->staticProperties()[ss.slot];
3508 auto const& tc = sprop.typeConstraint;
3509 if (!tc.isMixedResolved()) {
3510 raise_property_typehint_binding_error(
3511 sprop.cls,
3512 sprop.name,
3513 true,
3514 tc.isSoft()
3518 if (!isRefType(ss.val->m_type)) {
3519 tvBox(*ss.val);
3521 refDup(*ss.val, *ss.output);
3522 } else {
3523 cellDup(*tvToCell(ss.val), *ss.output);
3527 OPTBLD_INLINE void iopCGetS(clsref_slot slot) {
3528 getS<false>(slot);
3531 static inline MInstrState& initMState() {
3532 auto& mstate = vmMInstrState();
3533 tvWriteUninit(mstate.tvRef);
3534 tvWriteUninit(mstate.tvRef2);
3535 mstate.propState = MInstrPropState{};
3536 return mstate;
3539 static inline void baseGImpl(TypedValue* key, MOpMode mode) {
3540 auto& mstate = initMState();
3541 StringData* name;
3542 TypedValue* baseVal;
3544 if (mode == MOpMode::Define) lookupd_gbl(vmfp(), name, key, baseVal);
3545 else lookup_gbl(vmfp(), name, key, baseVal);
3546 SCOPE_EXIT { decRefStr(name); };
3548 if (baseVal == nullptr) {
3549 assertx(mode != MOpMode::Define);
3550 if (mode == MOpMode::Warn) {
3551 raise_notice(Strings::UNDEFINED_VARIABLE, name->data());
3553 tvWriteNull(mstate.tvTempBase);
3554 mstate.base = &mstate.tvTempBase;
3555 return;
3558 mstate.base = baseVal;
3561 OPTBLD_INLINE void iopBaseGC(uint32_t idx, MOpMode mode) {
3562 baseGImpl(vmStack().indTV(idx), mode);
3565 OPTBLD_INLINE void iopBaseGL(local_var loc, MOpMode mode) {
3566 baseGImpl(tvToCell(loc.ptr), mode);
3569 static inline tv_lval baseSImpl(TypedValue* key,
3570 clsref_slot slot,
3571 MOpMode mode) {
3572 auto const class_ = slot.take().second;
3574 auto const name = lookup_name(key);
3575 SCOPE_EXIT { decRefStr(name); };
3576 auto const lookup = class_->getSProp(arGetContextClass(vmfp()), name);
3577 if (!lookup.val || !lookup.accessible) {
3578 raise_error("Invalid static property access: %s::%s",
3579 class_->name()->data(),
3580 name->data());
3583 if (RuntimeOption::EvalCheckPropTypeHints > 0 && mode == MOpMode::Define) {
3584 vmMInstrState().propState = MInstrPropState{class_, lookup.slot, true};
3587 return tv_lval(lookup.val);
3590 OPTBLD_INLINE void iopBaseSC(uint32_t keyIdx, clsref_slot slot, MOpMode mode) {
3591 auto& mstate = initMState();
3592 mstate.base = baseSImpl(vmStack().indTV(keyIdx), slot, mode);
3595 OPTBLD_INLINE void baseLImpl(local_var loc, MOpMode mode) {
3596 auto& mstate = initMState();
3597 auto local = tvToCell(loc.ptr);
3598 if (mode == MOpMode::Warn && local->m_type == KindOfUninit) {
3599 raise_notice(Strings::UNDEFINED_VARIABLE,
3600 vmfp()->m_func->localVarName(loc.index)->data());
3602 mstate.base = local;
3605 OPTBLD_INLINE void iopBaseL(local_var loc, MOpMode mode) {
3606 baseLImpl(loc, mode);
3609 OPTBLD_INLINE void iopBaseC(uint32_t idx, MOpMode) {
3610 auto& mstate = initMState();
3611 mstate.base = vmStack().indC(idx);
3614 OPTBLD_INLINE void iopBaseH() {
3615 auto& mstate = initMState();
3616 mstate.tvTempBase = make_tv<KindOfObject>(vmfp()->getThis());
3617 mstate.base = &mstate.tvTempBase;
3620 static OPTBLD_INLINE void propDispatch(MOpMode mode, TypedValue key,
3621 bool reffy) {
3622 auto& mstate = vmMInstrState();
3623 auto pState = &mstate.propState;
3624 auto ctx = arGetContextClass(vmfp());
3626 auto const result = [&]{
3627 switch (mode) {
3628 case MOpMode::None:
3629 assertx(!reffy);
3630 return Prop<MOpMode::None>(mstate.tvRef, ctx, mstate.base, key, pState);
3631 case MOpMode::Warn:
3632 assertx(!reffy);
3633 return Prop<MOpMode::Warn>(mstate.tvRef, ctx, mstate.base, key, pState);
3634 case MOpMode::Define:
3635 if (reffy) {
3636 return Prop<MOpMode::Define,KeyType::Any,true>(
3637 mstate.tvRef, ctx, mstate.base, key, pState
3639 } else {
3640 return Prop<MOpMode::Define,KeyType::Any,false>(
3641 mstate.tvRef, ctx, mstate.base, key, pState
3644 case MOpMode::Unset:
3645 assertx(!reffy);
3646 return Prop<MOpMode::Unset>(
3647 mstate.tvRef, ctx, mstate.base, key, pState
3649 case MOpMode::InOut:
3650 always_assert_flog(false, "MOpMode::InOut can only occur on Elem");
3652 always_assert(false);
3653 }();
3655 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3658 static OPTBLD_INLINE void propQDispatch(MOpMode mode, TypedValue key,
3659 bool reffy) {
3660 auto& mstate = vmMInstrState();
3661 auto ctx = arGetContextClass(vmfp());
3663 auto const result = [&] {
3664 switch (mode) {
3665 case MOpMode::None:
3666 case MOpMode::Warn:
3667 assertx(key.m_type == KindOfPersistentString);
3668 return nullSafeProp(mstate.tvRef, ctx,
3669 mstate.base, key.m_data.pstr);
3670 case MOpMode::Define:
3671 if (reffy) raise_error(Strings::NULLSAFE_PROP_WRITE_ERROR);
3672 case MOpMode::InOut:
3673 always_assert_flog(false, "MOpMode::InOut can only occur on Elem");
3674 case MOpMode::Unset:
3675 always_assert(false);
3677 not_reached();
3678 }();
3680 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3683 static OPTBLD_INLINE
3684 void elemDispatch(MOpMode mode, TypedValue key, bool reffy) {
3685 auto& mstate = vmMInstrState();
3686 auto const b = mstate.base;
3688 auto const result = [&]() -> tv_rval {
3689 switch (mode) {
3690 case MOpMode::None:
3691 switch (intishCastMode()) {
3692 case ICMode::Warn:
3693 return Elem<MOpMode::None, ICMode::Warn>(
3694 mstate.tvRef, b, key
3696 case ICMode::Cast:
3697 return Elem<MOpMode::None, ICMode::Cast>(
3698 mstate.tvRef, b, key
3700 case ICMode::Ignore:
3701 return Elem<MOpMode::None, ICMode::Ignore>(
3702 mstate.tvRef, b, key
3705 case MOpMode::Warn:
3706 switch (intishCastMode()) {
3707 case ICMode::Warn:
3708 return Elem<MOpMode::Warn, ICMode::Warn>(
3709 mstate.tvRef, b, key
3711 case ICMode::Cast:
3712 return Elem<MOpMode::Warn, ICMode::Cast>(
3713 mstate.tvRef, b, key
3715 case ICMode::Ignore:
3716 return Elem<MOpMode::Warn, ICMode::Ignore>(
3717 mstate.tvRef, b, key
3720 case MOpMode::InOut:
3721 switch (intishCastMode()) {
3722 case ICMode::Warn:
3723 return Elem<MOpMode::InOut, ICMode::Warn>(
3724 mstate.tvRef, b, key
3726 case ICMode::Cast:
3727 return Elem<MOpMode::InOut, ICMode::Cast>(
3728 mstate.tvRef, b, key
3730 case ICMode::Ignore:
3731 return Elem<MOpMode::InOut, ICMode::Ignore>(
3732 mstate.tvRef, b, key
3735 case MOpMode::Define:
3736 switch (intishCastMode()) {
3737 case ICMode::Warn:
3738 return reffy
3739 ? ElemD<MOpMode::Define, true, ICMode::Warn>(
3740 mstate.tvRef, b, key, &mstate.propState
3742 : ElemD<MOpMode::Define, false, ICMode::Warn>(
3743 mstate.tvRef, b, key, &mstate.propState
3745 case ICMode::Cast:
3746 return reffy
3747 ? ElemD<MOpMode::Define, true, ICMode::Cast>(
3748 mstate.tvRef, b, key, &mstate.propState
3750 : ElemD<MOpMode::Define, false, ICMode::Cast>(
3751 mstate.tvRef, b, key, &mstate.propState
3753 case ICMode::Ignore:
3754 return reffy
3755 ? ElemD<MOpMode::Define, true, ICMode::Ignore>(
3756 mstate.tvRef, b, key, &mstate.propState
3758 : ElemD<MOpMode::Define, false, ICMode::Ignore>(
3759 mstate.tvRef, b, key, &mstate.propState
3762 case MOpMode::Unset:
3763 switch (intishCastMode()) {
3764 case ICMode::Warn:
3765 return ElemU<ICMode::Warn>(mstate.tvRef, b, key);
3766 case ICMode::Cast:
3767 return ElemU<ICMode::Cast>(mstate.tvRef, b, key);
3768 case ICMode::Ignore:
3769 return ElemU<ICMode::Ignore>(mstate.tvRef, b, key);
3772 always_assert(false);
3773 }().as_lval();
3775 if (mode == MOpMode::Define) mstate.propState = MInstrPropState{};
3776 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3779 static inline TypedValue key_tv(MemberKey key) {
3780 switch (key.mcode) {
3781 case MW:
3782 return TypedValue{};
3783 case MEL: case MPL: {
3784 auto local = tvToCell(frame_local(vmfp(), key.iva));
3785 if (local->m_type == KindOfUninit) {
3786 raise_undefined_local(vmfp(), key.iva);
3787 return make_tv<KindOfNull>();
3789 return *local;
3791 case MEC: case MPC:
3792 return *vmStack().indTV(key.iva);
3793 case MEI:
3794 return make_tv<KindOfInt64>(key.int64);
3795 case MET: case MPT: case MQT:
3796 return make_tv<KindOfPersistentString>(key.litstr);
3798 not_reached();
3801 static OPTBLD_INLINE void dimDispatch(MOpMode mode, MemberKey mk,
3802 bool reffy) {
3803 auto const key = key_tv(mk);
3804 if (mk.mcode == MQT) {
3805 propQDispatch(mode, key, reffy);
3806 } else if (mcodeIsProp(mk.mcode)) {
3807 propDispatch(mode, key, reffy);
3808 } else if (mcodeIsElem(mk.mcode)) {
3809 elemDispatch(mode, key, reffy);
3810 } else {
3811 if (mode == MOpMode::Warn) raise_error("Cannot use [] for reading");
3813 auto& mstate = vmMInstrState();
3814 auto const base = mstate.base;
3815 auto const result = [&] {
3816 if (reffy) {
3817 if (UNLIKELY(isHackArrayType(type(base)))) {
3818 throwRefInvalidArrayValueException(val(base).parr);
3820 return NewElem<true>(mstate.tvRef, base, &mstate.propState);
3821 } else {
3822 return NewElem<false>(mstate.tvRef, base, &mstate.propState);
3824 }();
3825 if (mode == MOpMode::Define) mstate.propState = MInstrPropState{};
3826 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3830 OPTBLD_INLINE void iopDim(MOpMode mode, MemberKey mk) {
3831 dimDispatch(mode, mk, false);
3834 static OPTBLD_INLINE void mFinal(MInstrState& mstate,
3835 int32_t nDiscard,
3836 folly::Optional<TypedValue> result) {
3837 auto& stack = vmStack();
3838 for (auto i = 0; i < nDiscard; ++i) stack.popTV();
3839 if (result) tvCopy(*result, *stack.allocTV());
3841 tvDecRefGenUnlikely(mstate.tvRef);
3842 tvDecRefGenUnlikely(mstate.tvRef2);
3845 static OPTBLD_INLINE
3846 void queryMImpl(MemberKey mk, int32_t nDiscard, QueryMOp op) {
3847 auto const key = key_tv(mk);
3848 auto& mstate = vmMInstrState();
3849 TypedValue result;
3850 switch (op) {
3851 case QueryMOp::InOut:
3852 always_assert_flog(
3853 mcodeIsElem(mk.mcode), "QueryM InOut is only compatible with Elem"
3855 // fallthrough
3856 case QueryMOp::CGet:
3857 case QueryMOp::CGetQuiet:
3858 dimDispatch(getQueryMOpMode(op), mk, false);
3859 tvDup(*tvToCell(mstate.base), result);
3860 break;
3862 case QueryMOp::Isset:
3863 case QueryMOp::Empty:
3864 result.m_type = KindOfBoolean;
3865 if (mcodeIsProp(mk.mcode)) {
3866 auto const ctx = arGetContextClass(vmfp());
3867 result.m_data.num = op == QueryMOp::Empty
3868 ? IssetEmptyProp<true>(ctx, mstate.base, key)
3869 : IssetEmptyProp<false>(ctx, mstate.base, key);
3870 } else {
3871 assertx(mcodeIsElem(mk.mcode));
3873 switch (intishCastMode()) {
3874 case ICMode::Warn:
3875 result.m_data.num = op == QueryMOp::Empty
3876 ? IssetEmptyElem<true, ICMode::Warn>(mstate.base, key)
3877 : IssetEmptyElem<false, ICMode::Warn>(mstate.base, key);
3878 break;
3879 case ICMode::Cast:
3880 result.m_data.num = op == QueryMOp::Empty
3881 ? IssetEmptyElem<true, ICMode::Cast>(mstate.base, key)
3882 : IssetEmptyElem<false, ICMode::Cast>(mstate.base, key);
3883 break;
3884 case ICMode::Ignore:
3885 result.m_data.num = op == QueryMOp::Empty
3886 ? IssetEmptyElem<true, ICMode::Ignore>(mstate.base, key)
3887 : IssetEmptyElem<false, ICMode::Ignore>(mstate.base, key);
3888 break;
3891 break;
3893 mFinal(mstate, nDiscard, result);
3896 OPTBLD_INLINE void iopQueryM(uint32_t nDiscard, QueryMOp subop, MemberKey mk) {
3897 queryMImpl(mk, nDiscard, subop);
3900 static OPTBLD_INLINE void vGetMImpl(MemberKey mk, int32_t nDiscard) {
3901 auto& mstate = vmMInstrState();
3902 TypedValue result;
3903 dimDispatch(MOpMode::Define, mk, true);
3904 tvBoxIfNeeded(mstate.base);
3905 refDup(*mstate.base, result);
3906 mFinal(mstate, nDiscard, result);
3909 OPTBLD_INLINE void iopVGetM(uint32_t nDiscard, MemberKey mk) {
3910 vGetMImpl(mk, nDiscard);
3913 OPTBLD_FLT_INLINE void iopSetM(uint32_t nDiscard, MemberKey mk) {
3914 auto& mstate = vmMInstrState();
3915 auto const topC = vmStack().topC();
3917 if (mk.mcode == MW) {
3918 SetNewElem<true>(mstate.base, topC, &mstate.propState);
3919 } else {
3920 auto const key = key_tv(mk);
3921 if (mcodeIsElem(mk.mcode)) {
3922 auto const result = ([&] {
3923 switch (intishCastMode()) {
3924 case ICMode::Warn:
3925 return SetElem<true, ICMode::Warn>(
3926 mstate.base, key, topC, &mstate.propState
3928 case ICMode::Cast:
3929 return SetElem<true, ICMode::Cast>(
3930 mstate.base, key, topC, &mstate.propState
3932 case ICMode::Ignore:
3933 return SetElem<true, ICMode::Ignore>(
3934 mstate.base, key, topC, &mstate.propState
3937 not_reached();
3938 })();
3939 if (result) {
3940 tvDecRefGen(topC);
3941 topC->m_type = KindOfString;
3942 topC->m_data.pstr = result;
3944 } else {
3945 auto const ctx = arGetContextClass(vmfp());
3946 SetProp<true>(ctx, mstate.base, key, topC, &mstate.propState);
3950 auto const result = *topC;
3951 vmStack().discard();
3952 mFinal(mstate, nDiscard, result);
3955 OPTBLD_INLINE void iopSetRangeM(
3956 uint32_t nDiscard, SetRangeOp op, uint32_t size
3958 auto& mstate = vmMInstrState();
3959 auto const count = tvCastToInt64(*vmStack().indC(0));
3960 auto const src = *vmStack().indC(1);
3961 auto const offset = tvCastToInt64(*vmStack().indC(2));
3963 if (op == SetRangeOp::Forward) {
3964 SetRange<false>(mstate.base, offset, src, count, size);
3965 } else {
3966 SetRange<true>(mstate.base, offset, src, count, size);
3969 mFinal(mstate, nDiscard + 3, folly::none);
3972 OPTBLD_INLINE void iopIncDecM(uint32_t nDiscard, IncDecOp subop, MemberKey mk) {
3973 auto const key = key_tv(mk);
3975 auto& mstate = vmMInstrState();
3976 Cell result;
3977 if (mcodeIsProp(mk.mcode)) {
3978 result = IncDecProp(
3979 arGetContextClass(vmfp()), subop, mstate.base, key, &mstate.propState
3981 } else if (mcodeIsElem(mk.mcode)) {
3982 switch (intishCastMode()) {
3983 case ICMode::Warn:
3984 result = IncDecElem<ICMode::Warn>(
3985 subop, mstate.base, key, &mstate.propState
3987 break;
3988 case ICMode::Cast:
3989 result = IncDecElem<ICMode::Cast>(
3990 subop, mstate.base, key, &mstate.propState
3992 break;
3993 case ICMode::Ignore:
3994 result = IncDecElem<ICMode::Ignore>(
3995 subop, mstate.base, key, &mstate.propState
3997 break;
3999 } else {
4000 result = IncDecNewElem(mstate.tvRef, subop, mstate.base, &mstate.propState);
4003 mFinal(mstate, nDiscard, result);
4006 OPTBLD_INLINE void iopSetOpM(uint32_t nDiscard, SetOpOp subop, MemberKey mk) {
4007 auto const key = key_tv(mk);
4008 auto const rhs = vmStack().topC();
4010 auto& mstate = vmMInstrState();
4011 tv_lval result;
4012 if (mcodeIsProp(mk.mcode)) {
4013 result = SetOpProp(mstate.tvRef, arGetContextClass(vmfp()), subop,
4014 mstate.base, key, rhs, &mstate.propState);
4015 } else if (mcodeIsElem(mk.mcode)) {
4016 switch (intishCastMode()) {
4017 case ICMode::Warn:
4018 result = SetOpElem<ICMode::Warn>(
4019 mstate.tvRef, subop, mstate.base, key, rhs, &mstate.propState
4021 break;
4022 case ICMode::Cast:
4023 result = SetOpElem<ICMode::Cast>(
4024 mstate.tvRef, subop, mstate.base, key, rhs, &mstate.propState
4026 break;
4027 case ICMode::Ignore:
4028 result = SetOpElem<ICMode::Ignore>(
4029 mstate.tvRef, subop, mstate.base, key, rhs, &mstate.propState
4031 break;
4033 } else {
4034 result =
4035 SetOpNewElem(mstate.tvRef, subop, mstate.base, rhs, &mstate.propState);
4038 vmStack().popC();
4039 result = tvToCell(result);
4040 tvIncRefGen(*result);
4041 mFinal(mstate, nDiscard, *result);
4044 OPTBLD_INLINE void iopBindM(uint32_t nDiscard, MemberKey mk) {
4045 auto& mstate = vmMInstrState();
4046 auto const rhs = *vmStack().topV();
4048 dimDispatch(MOpMode::Define, mk, true);
4049 tvBind(rhs, mstate.base);
4051 vmStack().discard();
4052 mFinal(mstate, nDiscard, rhs);
4055 OPTBLD_INLINE void iopUnsetM(uint32_t nDiscard, MemberKey mk) {
4056 auto const key = key_tv(mk);
4058 auto& mstate = vmMInstrState();
4059 if (mcodeIsProp(mk.mcode)) {
4060 UnsetProp(arGetContextClass(vmfp()), mstate.base, key);
4061 } else {
4062 assertx(mcodeIsElem(mk.mcode));
4063 switch (intishCastMode()) {
4064 case ICMode::Warn:
4065 UnsetElem<ICMode::Warn>(mstate.base, key);
4066 break;
4067 case ICMode::Cast:
4068 UnsetElem<ICMode::Cast>(mstate.base, key);
4069 break;
4070 case ICMode::Ignore:
4071 UnsetElem<ICMode::Ignore>(mstate.base, key);
4072 break;
4076 mFinal(mstate, nDiscard, folly::none);
4079 namespace {
4081 inline void checkThis(ActRec* fp) {
4082 if (!fp->func()->cls() || !fp->hasThis()) {
4083 raise_error(Strings::FATAL_NULL_THIS);
4087 OPTBLD_INLINE const Cell* memoGetImpl(LocalRange keys) {
4088 assertx(vmfp()->m_func->isMemoizeWrapper());
4089 assertx(keys.first + keys.count <= vmfp()->m_func->numLocals());
4091 for (auto i = 0; i < keys.count; ++i) {
4092 auto const key = frame_local(vmfp(), keys.first + i);
4093 if (!isIntType(key->m_type) && !isStringType(key->m_type)) {
4094 raise_error("Memoization keys can only be ints or strings");
4098 auto const c = [&] () -> const Cell* {
4099 auto const func = vmfp()->m_func;
4100 if (!func->isMethod() || func->isStatic()) {
4101 auto const lsbCls =
4102 func->isMemoizeWrapperLSB() ? vmfp()->getClass() : nullptr;
4103 if (keys.count > 0) {
4104 auto cache =
4105 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
4106 : rds::bindStaticMemoCache(func);
4107 if (!cache.isInit()) return nullptr;
4108 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
4109 if (auto getter = memoCacheGetForKeyCount(keys.count)) {
4110 return getter(*cache, keysBegin);
4112 return memoCacheGetGeneric(
4113 *cache,
4114 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
4115 keysBegin
4119 auto cache =
4120 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
4121 : rds::bindStaticMemoValue(func);
4122 return cache.isInit() ? cache.get() : nullptr;
4125 checkThis(vmfp());
4126 auto const this_ = vmfp()->getThis();
4127 auto const cls = func->cls();
4128 assertx(this_->instanceof(cls));
4129 assertx(cls->hasMemoSlots());
4131 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
4133 auto const slot = UNLIKELY(this_->hasNativeData())
4134 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
4135 : this_->memoSlot(memoInfo.first);
4137 if (keys.count == 0 && !memoInfo.second) {
4138 auto const val = slot->getValue();
4139 return val->m_type != KindOfUninit ? val : nullptr;
4142 auto const cache = slot->getCache();
4143 if (!cache) return nullptr;
4145 if (memoInfo.second) {
4146 if (keys.count == 0) {
4147 return memoCacheGetSharedOnly(
4148 cache,
4149 makeSharedOnlyKey(func->getFuncId())
4152 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
4153 if (auto const getter = sharedMemoCacheGetForKeyCount(keys.count)) {
4154 return getter(cache, func->getFuncId(), keysBegin);
4156 return memoCacheGetGeneric(
4157 cache,
4158 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
4159 keysBegin
4163 assertx(keys.count > 0);
4164 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
4165 if (auto const getter = memoCacheGetForKeyCount(keys.count)) {
4166 return getter(cache, keysBegin);
4168 return memoCacheGetGeneric(
4169 cache,
4170 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
4171 keysBegin
4173 }();
4175 assertx(!c || cellIsPlausible(*c));
4176 assertx(!c || c->m_type != KindOfUninit);
4177 return c;
4182 OPTBLD_INLINE void iopMemoGet(PC& pc, PC notfound, LocalRange keys) {
4183 if (auto const c = memoGetImpl(keys)) {
4184 cellDup(*c, *vmStack().allocC());
4185 } else {
4186 pc = notfound;
4190 OPTBLD_INLINE void iopMemoGetEager(PC& pc,
4191 PC notfound,
4192 PC suspended,
4193 LocalRange keys) {
4194 assertx(vmfp()->m_func->isAsyncFunction());
4195 assertx(!vmfp()->resumed());
4197 if (auto const c = memoGetImpl(keys)) {
4198 cellDup(*c, *vmStack().allocC());
4199 if (c->m_aux.u_asyncNonEagerReturnFlag) {
4200 assertx(tvIsObject(c) && c->m_data.pobj->isWaitHandle());
4201 pc = suspended;
4203 } else {
4204 pc = notfound;
4208 namespace {
// Store `val` into the memoization cache addressed by the local-range `keys`.
// Mirrors memoGetImpl's dispatch: static/LSB functions use RDS-bound caches,
// instance methods use per-object memo slots (possibly shared across methods).
OPTBLD_INLINE void memoSetImpl(LocalRange keys, Cell val) {
  assertx(vmfp()->m_func->isMemoizeWrapper());
  assertx(keys.first + keys.count <= vmfp()->m_func->numLocals());
  assertx(cellIsPlausible(val));

  // Memo keys must already be normalized to int or string by the wrapper.
  for (auto i = 0; i < keys.count; ++i) {
    auto const key = frame_local(vmfp(), keys.first + i);
    if (!isIntType(key->m_type) && !isStringType(key->m_type)) {
      raise_error("Memoization keys can only be ints or strings");
    }
  }

  auto const func = vmfp()->m_func;
  if (!func->isMethod() || func->isStatic()) {
    // Free function or static method: cache lives in RDS. LSB wrappers get a
    // per-late-bound-class cache.
    auto const lsbCls =
      func->isMemoizeWrapperLSB() ? vmfp()->getClass() : nullptr;
    if (keys.count > 0) {
      auto cache =
        lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
               : rds::bindStaticMemoCache(func);
      if (!cache.isInit()) cache.initWith(nullptr);
      // NOTE: keys are laid out so the cache helpers walk them from the last
      // local backwards; hence "begin" is the highest-indexed local.
      auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
      if (auto setter = memoCacheSetForKeyCount(keys.count)) {
        // Specialized setter exists for this key count.
        return setter(*cache, keysBegin, val);
      }
      return memoCacheSetGeneric(
        *cache,
        GenericMemoId{func->getFuncId(), keys.count}.asParam(),
        keysBegin,
        val
      );
    }
    // Zero keys: a single memoized value, not a keyed cache.
    auto cache =
      lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
             : rds::bindStaticMemoValue(func);
    if (!cache.isInit()) {
      tvWriteUninit(*cache);
      cache.markInit();
    }
    cellSetWithAux(val, *cache);
    return;
  }

  // Instance method: cache lives in a memo slot hanging off $this.
  checkThis(vmfp());
  auto const this_ = vmfp()->getThis();
  auto const cls = func->cls();
  assertx(this_->instanceof(cls));
  assertx(cls->hasMemoSlots());

  this_->setAttribute(ObjectData::UsedMemoCache);

  // memoInfo.second indicates the slot is shared between multiple methods.
  auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());

  auto slot = UNLIKELY(this_->hasNativeData())
    ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
    : this_->memoSlot(memoInfo.first);

  // Unshared slot with no keys: store the value directly in the slot.
  if (keys.count == 0 && !memoInfo.second) {
    cellSetWithAux(val, *slot->getValue());
    return;
  }

  auto& cache = slot->getCacheForWrite();

  if (memoInfo.second) {
    // Shared slot: entries are additionally keyed by FuncId.
    if (keys.count == 0) {
      return memoCacheSetSharedOnly(
        cache,
        makeSharedOnlyKey(func->getFuncId()),
        val
      );
    }
    auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
    if (auto const setter = sharedMemoCacheSetForKeyCount(keys.count)) {
      return setter(cache, func->getFuncId(), keysBegin, val);
    }
    return memoCacheSetGeneric(
      cache,
      GenericMemoId{func->getFuncId(), keys.count}.asParam(),
      keysBegin,
      val
    );
  }

  // Unshared slot with keys.
  assertx(keys.count > 0);
  auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
  if (auto const setter = memoCacheSetForKeyCount(keys.count)) {
    return setter(cache, keysBegin, val);
  }
  return memoCacheSetGeneric(
    cache,
    GenericMemoId{func->getFuncId(), keys.count}.asParam(),
    keysBegin,
    val
  );
}
// MemoSet: record the value on top of the stack in the memo cache. For async
// functions this path is reached with a wait-handle, so tag the aux bit that
// MemoGetEager later uses to branch to its `suspended` target.
OPTBLD_INLINE void iopMemoSet(LocalRange keys) {
  auto val = *vmStack().topC();
  assertx(val.m_type != KindOfUninit);
  if (vmfp()->m_func->isAsyncFunction()) {
    assertx(tvIsObject(val) && val.m_data.pobj->isWaitHandle());
    // -1 == all bits set: marks "non-eager return" for MemoGetEager.
    val.m_aux.u_asyncNonEagerReturnFlag = -1;
  }
  memoSetImpl(keys, val);
}
// MemoSetEager: record an eagerly-returned value from a non-resumed async
// function. Clears the aux flag so MemoGetEager treats it as a plain value.
OPTBLD_INLINE void iopMemoSetEager(LocalRange keys) {
  assertx(vmfp()->m_func->isAsyncFunction());
  assertx(!vmfp()->resumed());
  auto val = *vmStack().topC();
  assertx(val.m_type != KindOfUninit);
  val.m_aux.u_asyncNonEagerReturnFlag = 0;
  memoSetImpl(keys, val);
}
// Box `fr` in place if it is not already a Ref, then copy the reference
// (inc-ref'd) into `to`. Shared helper for the VGet* instructions.
static inline void vgetl_body(TypedValue* fr, TypedValue* to) {
  if (!isRefType(fr->m_type)) {
    tvBox(*fr);
  }
  refDup(*fr, *to);
}
// VGetL: push a Ref to the given local (boxing the local if needed).
OPTBLD_INLINE void iopVGetL(local_var fr) {
  Ref* to = vmStack().allocV();
  vgetl_body(fr.ptr, to);
}
// VGetG: replace the global-name cell on top of the stack with a Ref to that
// global, creating the global if it does not exist (lookupd_gbl).
OPTBLD_INLINE void iopVGetG() {
  StringData* name;
  TypedValue* to = vmStack().topTV();
  TypedValue* fr = nullptr;
  lookupd_gbl(vmfp(), name, to, fr);
  SCOPE_EXIT { decRefStr(name); };
  assertx(fr != nullptr);
  // Drop the name cell before overwriting it with the reference.
  tvDecRefGen(to);
  vgetl_body(fr, to);
}
// VGetS: static-property fetch by reference; the shared getS template does
// the lookup/box work (true => box).
OPTBLD_INLINE void iopVGetS(clsref_slot slot) {
  getS<true>(slot);
}
// IssetG: replace the global-name cell with a bool — true iff the global
// exists and is non-null. Uses the non-creating lookup (lookup_gbl).
OPTBLD_INLINE void iopIssetG() {
  StringData* name;
  TypedValue* tv1 = vmStack().topTV();
  TypedValue* tv = nullptr;
  bool e;
  lookup_gbl(vmfp(), name, tv1, tv);
  SCOPE_EXIT { decRefStr(name); };
  if (tv == nullptr) {
    e = false;
  } else {
    e = !cellIsNull(tvToCell(tv));
  }
  vmStack().replaceC<KindOfBoolean>(e);
}
// IssetS: isset() on a static property. Inaccessible/invisible props report
// false rather than erroring (unlike SetS/SetOpS which raise).
OPTBLD_INLINE void iopIssetS(clsref_slot slot) {
  SpropState ss(vmStack(), slot, true);
  bool e;
  if (!(ss.visible && ss.accessible)) {
    e = false;
  } else {
    e = !cellIsNull(tvToCell(ss.val));
  }
  // SpropState designates the output cell; write the bool in place.
  ss.output->m_data.num = e;
  ss.output->m_type = KindOfBoolean;
}
4384 OPTBLD_FLT_INLINE void iopIssetL(local_var tv) {
4385 bool ret = !is_null(tvToCell(tv.ptr));
4386 TypedValue* topTv = vmStack().allocTV();
4387 topTv->m_data.num = ret;
4388 topTv->m_type = KindOfBoolean;
// Shared predicate for IsTypeL/IsTypeC. Beyond the plain type test, several
// cases emit Hack-array compatibility notices whose exact branch order is
// load-bearing (notice first, then return the legacy answer).
OPTBLD_INLINE static bool isTypeHelper(Cell* val, IsTypeOp op) {
  assertx(cellIsPlausible(*val));

  switch (op) {
  case IsTypeOp::Null: return is_null(val);
  case IsTypeOp::Bool: return is_bool(val);
  case IsTypeOp::Int: return is_int(val);
  case IsTypeOp::Dbl: return is_double(val);
  case IsTypeOp::Arr:
    // With HackArrCompatIsArrayNotices, warn when is_array() is applied to a
    // Hack array (vec/dict/keyset) from non-builtin code, but still answer
    // with the legacy semantics.
    if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsArrayNotices &&
                 !vmfp()->m_func->isBuiltin())) {
      if (isArrayOrShapeType(val->m_type)) {
        return true;
      } else if (isVecType(val->m_type)) {
        raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VEC_IS_ARR);
      } else if (isDictOrShapeType(val->m_type)) {
        raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DICT_IS_ARR);
      } else if (isKeysetType(val->m_type)) {
        raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_KEYSET_IS_ARR);
      }
      return false;
    }
    return is_array(val);
  case IsTypeOp::Vec:
    // Notice when a varray is tested with is_vec (migration aid).
    if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
      if (isArrayType(val->m_type)) {
        if (val->m_data.parr->isVArray()) {
          raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VARR_IS_VEC);
        }
        return false;
      }
    }
    return is_vec(val);
  case IsTypeOp::Dict:
    // Notice when a darray is tested with is_dict.
    if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
      if (isArrayOrShapeType(val->m_type)) {
        if (val->m_data.parr->isDArray()) {
          raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DARR_IS_DICT);
        }
        return false;
      }
    }
    return is_dict(val);
  case IsTypeOp::Keyset: return is_keyset(val);
  case IsTypeOp::Obj: return is_object(val);
  case IsTypeOp::Str: return is_string(val);
  case IsTypeOp::Res: return val->m_type == KindOfResource;
  case IsTypeOp::Scalar: return HHVM_FN(is_scalar)(tvAsCVarRef(val));
  case IsTypeOp::ArrLike:
    // ClsMeth values count as array-like (with an optional notice).
    if (isClsMethType(val->m_type)) {
      if (RuntimeOption::EvalIsVecNotices) {
        raise_notice(Strings::CLSMETH_COMPAT_IS_ANY_ARR);
      }
      return true;
    }
    return isArrayLikeType(val->m_type);
  case IsTypeOp::VArray:
    assertx(!RuntimeOption::EvalHackArrDVArrs);
    if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
      if (isVecType(val->m_type)) {
        raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VEC_IS_VARR);
        return false;
      }
    }
    return is_varray(val);
  case IsTypeOp::DArray:
    assertx(!RuntimeOption::EvalHackArrDVArrs);
    if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
      if (isDictType(val->m_type)) {
        raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DICT_IS_DARR);
        return false;
      }
    }
    return is_darray(val);
  case IsTypeOp::ClsMeth: return is_clsmeth(val);
  }
  not_reached();
}
// IsTypeL: type-test a local and push the bool. Unlike IssetL, an uninit
// local raises an undefined-local notice first (then tests as null).
OPTBLD_INLINE void iopIsTypeL(local_var loc, IsTypeOp op) {
  if (loc.ptr->m_type == KindOfUninit) {
    raise_undefined_local(vmfp(), loc.index);
  }
  vmStack().pushBool(isTypeHelper(tvToCell(loc.ptr), op));
}
// IsTypeC: type-test the cell on top of the stack, replacing it with a bool.
OPTBLD_INLINE void iopIsTypeC(IsTypeOp op) {
  auto val = vmStack().topC();
  vmStack().replaceC(make_tv<KindOfBoolean>(isTypeHelper(val, op)));
}
// AssertRATL: debug-build check that a local matches the repo-authoritative
// type the compiler asserted. No-op in release builds.
OPTBLD_FLT_INLINE void iopAssertRATL(local_var loc, RepoAuthType rat) {
  if (debug) {
    auto const tv = *loc.ptr;
    auto const func = vmfp()->func();
    auto vm = &*g_context;
    always_assert_flog(
      tvMatchesRepoAuthType(tv, rat),
      "failed assert RATL on local {}: ${} in {}:{}, expected {}, got {}",
      loc.index,
      loc.index < func->numNamedLocals() ?
        func->localNames()[loc.index]->data() : "<unnamed>",
      vm->getContainingFileName()->data(),
      vm->getLine(),
      show(rat),
      toStringElm(&tv)
    );
  }
}
// AssertRATStk: debug-build repo-auth-type check for a stack slot.
OPTBLD_INLINE void iopAssertRATStk(uint32_t stkSlot, RepoAuthType rat) {
  if (debug) {
    auto const tv = *vmStack().indTV(stkSlot);
    auto vm = &*g_context;
    always_assert_flog(
      tvMatchesRepoAuthType(tv, rat),
      "failed assert RATStk {} in {}:{}, expected {}, got {}",
      stkSlot,
      vm->getContainingFileName()->data(),
      vm->getLine(),
      show(rat),
      toStringElm(&tv)
    );
  }
}
// BreakTraceHint: runtime no-op; the opcode only influences JIT region
// selection.
OPTBLD_INLINE void iopBreakTraceHint() {
}
4520 OPTBLD_INLINE void iopEmptyL(local_var loc) {
4521 bool e = !cellToBool(*tvToCell(loc.ptr));
4522 vmStack().pushBool(e);
// EmptyG: replace the global-name cell with empty() of that global. A
// missing global is empty (true); no global is created (lookup_gbl).
OPTBLD_INLINE void iopEmptyG() {
  StringData* name;
  TypedValue* tv1 = vmStack().topTV();
  TypedValue* tv = nullptr;
  bool e;
  lookup_gbl(vmfp(), name, tv1, tv);
  SCOPE_EXIT { decRefStr(name); };
  if (tv == nullptr) {
    e = true;
  } else {
    e = !cellToBool(*tvToCell(tv));
  }
  vmStack().replaceC<KindOfBoolean>(e);
}
// EmptyS: empty() on a static property. Invisible/inaccessible props are
// empty (true) rather than an error.
OPTBLD_INLINE void iopEmptyS(clsref_slot slot) {
  SpropState ss(vmStack(), slot, true);
  bool e;
  if (!(ss.visible && ss.accessible)) {
    e = true;
  } else {
    e = !cellToBool(*tvToCell(ss.val));
  }
  ss.output->m_data.num = e;
  ss.output->m_type = KindOfBoolean;
}
// AKExists: array_key_exists(key, container) on the top two stack cells
// (container on top, key beneath); leaves the bool result.
OPTBLD_INLINE void iopAKExists() {
  TypedValue* arr = vmStack().topTV();
  TypedValue* key = arr + 1; // cell directly beneath the container
  bool result = HHVM_FN(array_key_exists)(tvAsCVarRef(key), tvAsCVarRef(arr));
  vmStack().popTV();
  vmStack().replaceTV<KindOfBoolean>(result);
}
// GetMemoKeyL: normalize a local into a memoization key (int or string) and
// push it, using the generic serialize_memoize_param scheme.
OPTBLD_INLINE void iopGetMemoKeyL(local_var loc) {
  DEBUG_ONLY auto const func = vmfp()->m_func;
  assertx(func->isMemoizeWrapper());
  assertx(!func->anyByRef());

  assertx(tvIsPlausible(*loc.ptr));

  if (UNLIKELY(loc.ptr->m_type == KindOfUninit)) {
    // Normalize to null and warn, matching regular local reads.
    tvWriteNull(*loc.ptr);
    raise_undefined_local(vmfp(), loc.index);
  }
  auto const cell = tvToCell(loc.ptr);

  // Use the generic scheme, which is performed by
  // serialize_memoize_param.
  auto const key = HHVM_FN(serialize_memoize_param)(*cell);
  cellCopy(key, *vmStack().allocC());
}
4579 namespace {
const StaticString s_idx("hh\\idx");

// Fallback for iopIdx: delegate to the systemlib hh\idx() function for
// containers the fast paths don't handle (objects, strings).
TypedValue genericIdx(TypedValue obj, TypedValue key, TypedValue def) {
  static auto func = Unit::loadFunc(s_idx.get());
  assertx(func != nullptr);
  TypedValue args[] = {
    obj,
    key,
    def  // NOTE(review): this argument line was elided in the scrape; the
         // call below passes 3 args, so a third element must exist here.
  };
  return g_context->invokeFuncFew(func, nullptr, nullptr, 3, &args[0]);
}
// Idx: implements idx(container, key, default) over the top three stack
// cells. Each branch must leave exactly one result cell; note the differing
// pop/discard discipline per branch (values may be teleported, not decref'd).
OPTBLD_INLINE void iopIdx() {
  TypedValue* def = vmStack().topTV();
  TypedValue* key = vmStack().indTV(1);
  TypedValue* arr = vmStack().indTV(2);

  TypedValue result;
  if (isArrayLikeType(arr->m_type)) {
    result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
                                     tvAsCVarRef(key),
                                     tvAsCVarRef(def));
    vmStack().popTV();
  } else if (isNullType(key->m_type)) {
    // Null key: result is the default; move it into the container slot
    // without refcount traffic (ownership teleports from def's cell).
    tvDecRefGen(arr);
    *arr = *def;
    vmStack().ndiscard(2);
    return;
  } else if (!isStringType(arr->m_type) &&
             arr->m_type != KindOfObject) {
    // Non-indexable scalar: the default wins; discard teleports ownership.
    result = *def;
    vmStack().discard();
  } else {
    // Strings/objects go through systemlib hh\idx().
    result = genericIdx(*arr, *key, *def);
    vmStack().popTV();
  }
  vmStack().popTV();
  tvDecRefGen(arr);
  *arr = result;
}
// ArrayIdx: hphp_array_idx(container, key, default) over the top three
// stack cells; ClsMeth containers are first materialized as vec/varray.
OPTBLD_INLINE void iopArrayIdx() {
  TypedValue* def = vmStack().topTV();
  TypedValue* key = vmStack().indTV(1);
  TypedValue* arr = vmStack().indTV(2);
  if (isClsMethType(type(arr))) {
    if (RuntimeOption::EvalHackArrDVArrs) {
      tvCastToVecInPlace(arr);
    } else {
      tvCastToVArrayInPlace(arr);
    }
  }
  auto const result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
                                              tvAsCVarRef(key),
                                              tvAsCVarRef(def));
  vmStack().popTV();
  vmStack().popTV();
  tvDecRefGen(arr);
  *arr = result;
}
// SetL: assign the cell on top of the stack into a local. The value stays
// on the stack (SetL's result is the assigned value).
OPTBLD_INLINE void iopSetL(local_var to) {
  assertx(to.index < vmfp()->m_func->numLocals());
  Cell* fr = vmStack().topC();
  tvSet(*fr, *to);
}
// SetG: assign top-of-stack into the global named by the cell beneath it
// (creating the global if needed), leaving the value as the result.
OPTBLD_INLINE void iopSetG() {
  StringData* name;
  Cell* fr = vmStack().topC();
  TypedValue* tv2 = vmStack().indTV(1);
  TypedValue* to = nullptr;
  lookupd_gbl(vmfp(), name, tv2, to);
  SCOPE_EXIT { decRefStr(name); };
  assertx(to != nullptr);
  tvSet(*fr, *to);
  // Teleport the value cell into the name slot (no refcount ops), then drop
  // the now-duplicated top slot.
  memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
  vmStack().discard();
}
// SetS: assign top-of-stack into a static property (class from the clsref
// slot, property name from the cell beneath the value). Enforces property
// type hints when enabled; result is the assigned value.
OPTBLD_INLINE void iopSetS(clsref_slot cslot) {
  TypedValue* tv1 = vmStack().topTV();
  Class* cls = cslot.take().second;
  TypedValue* propn = vmStack().indTV(1);
  TypedValue* output = propn;
  StringData* name;
  TypedValue* val;
  bool visible, accessible;
  Slot slot;
  lookup_sprop(vmfp(), cls, name, propn, val, slot, visible, accessible, true);
  SCOPE_EXIT { decRefStr(name); };
  if (!(visible && accessible)) {
    raise_error("Invalid static property access: %s::%s",
                cls->name()->data(),
                name->data());
  }
  if (RuntimeOption::EvalCheckPropTypeHints > 0) {
    auto const& sprop = cls->staticProperties()[slot];
    auto const& tc = sprop.typeConstraint;
    if (tc.isCheckable()) tc.verifyStaticProperty(tv1, cls, sprop.cls, name);
  }
  tvSet(*tv1, *val);
  tvDecRefGen(propn);
  // Teleport the value into the name slot; discard the duplicate top.
  memcpy(output, tv1, sizeof(TypedValue));
  vmStack().ndiscard(1);
}
// SetOpL: compound assignment (+=, .=, etc.) of top-of-stack into a local;
// the updated value replaces the operand on the stack.
OPTBLD_INLINE void iopSetOpL(local_var loc, SetOpOp op) {
  Cell* fr = vmStack().topC();
  Cell* to = tvToCell(loc.ptr);
  setopBody(to, op, fr);
  // Replace the RHS operand cell with a copy of the result.
  tvDecRefGen(fr);
  cellDup(*to, *fr);
}
// SetOpG: compound assignment into a global (created if missing); leaves a
// copy of the updated value as the result.
OPTBLD_INLINE void iopSetOpG(SetOpOp op) {
  StringData* name;
  Cell* fr = vmStack().topC();
  TypedValue* tv2 = vmStack().indTV(1);
  TypedValue* to = nullptr;
  // XXX We're probably not getting warnings totally correct here
  lookupd_gbl(vmfp(), name, tv2, to);
  SCOPE_EXIT { decRefStr(name); };
  assertx(to != nullptr);
  setopBody(tvToCell(to), op, fr);
  tvDecRefGen(fr);
  tvDecRefGen(tv2);
  cellDup(*tvToCell(to), *tv2);
  vmStack().discard();
}
// SetOpS: compound assignment into a static property. When the property has
// a checkable type hint, the op runs on a temporary so the hint can be
// verified before the property is mutated.
OPTBLD_INLINE void iopSetOpS(SetOpOp op, clsref_slot cslot) {
  Cell* fr = vmStack().topC();
  Class* cls = cslot.take().second;
  TypedValue* propn = vmStack().indTV(1);
  TypedValue* output = propn;
  StringData* name;
  TypedValue* val;
  bool visible, accessible;
  Slot slot;
  lookup_sprop(vmfp(), cls, name, propn, val, slot, visible, accessible, false);
  SCOPE_EXIT { decRefStr(name); };
  if (!(visible && accessible)) {
    raise_error("Invalid static property access: %s::%s",
                cls->name()->data(),
                name->data());
  }

  val = tvToCell(val);
  auto const& sprop = cls->staticProperties()[slot];
  if (setOpNeedsTypeCheck(sprop.typeConstraint, op, val)) {
    // Compute into a temp, verify the hint, then commit.
    Cell temp;
    cellDup(*val, temp);
    SCOPE_FAIL { tvDecRefGen(&temp); };
    setopBody(&temp, op, fr);
    sprop.typeConstraint.verifyStaticProperty(
      &temp, cls, sprop.cls, name
    );
    cellMove(temp, *val);
  } else {
    setopBody(val, op, fr);
  }

  tvDecRefGen(propn);
  tvDecRefGen(fr);
  cellDup(*val, *output);
  vmStack().ndiscard(1);
}
// IncDecL: ++/-- (pre or post) on a local; pushes the operation's result
// (old or new value depending on the op).
OPTBLD_INLINE void iopIncDecL(local_var fr, IncDecOp op) {
  TypedValue* to = vmStack().allocTV();
  tvWriteUninit(*to);
  if (UNLIKELY(fr.ptr->m_type == KindOfUninit)) {
    // Warn, then treat the local as null.
    raise_undefined_local(vmfp(), fr.index);
    tvWriteNull(*fr.ptr);
  } else {
    fr.ptr = tvToCell(fr.ptr);
  }
  cellCopy(IncDecBody(op, fr.ptr), *to);
}
// IncDecG: ++/-- on a global (created if missing). The old name cell is
// kept alive until after the result overwrites it, then released.
OPTBLD_INLINE void iopIncDecG(IncDecOp op) {
  StringData* name;
  TypedValue* nameCell = vmStack().topTV();
  TypedValue* gbl = nullptr;
  lookupd_gbl(vmfp(), name, nameCell, gbl);
  auto oldNameCell = *nameCell;
  SCOPE_EXIT {
    decRefStr(name);
    tvDecRefGen(oldNameCell);
  };
  assertx(gbl != nullptr);
  cellCopy(IncDecBody(op, tvToCell(gbl)), *nameCell);
}
// IncDecS: ++/-- on a static property, with the same temp-then-verify dance
// as SetOpS when the property has a checkable type hint.
OPTBLD_INLINE void iopIncDecS(IncDecOp op, clsref_slot slot) {
  SpropState ss(vmStack(), slot, false);
  if (!(ss.visible && ss.accessible)) {
    raise_error("Invalid static property access: %s::%s",
                ss.cls->name()->data(),
                ss.name->data());
  }

  auto const checkable_sprop = [&]() -> const Class::SProp* {
    if (RuntimeOption::EvalCheckPropTypeHints <= 0) return nullptr;
    auto const& sprop = ss.cls->staticProperties()[ss.slot];
    return sprop.typeConstraint.isCheckable() ? &sprop : nullptr;
  }();

  auto const val = tvToCell(ss.val);
  if (checkable_sprop) {
    Cell temp;
    cellDup(*val, temp);
    SCOPE_FAIL { tvDecRefGen(&temp); };
    auto result = IncDecBody(op, &temp);
    SCOPE_FAIL { tvDecRefGen(&result); };
    checkable_sprop->typeConstraint.verifyStaticProperty(
      &temp,
      ss.cls,
      checkable_sprop->cls,
      ss.name
    );
    cellMove(temp, *val);
    cellCopy(result, *ss.output);
  } else {
    cellCopy(IncDecBody(op, val), *ss.output);
  }
}
// BindL: bind the Ref on top of the stack into a local (=& assignment).
OPTBLD_INLINE void iopBindL(local_var to) {
  Ref* fr = vmStack().topV();
  tvBind(*fr, *to.ptr);
}
// BindG: bind the Ref on top of the stack into the global named beneath it
// (creating the global if needed); the Ref remains as the result.
OPTBLD_INLINE void iopBindG() {
  StringData* name;
  TypedValue* fr = vmStack().topTV();
  TypedValue* nameTV = vmStack().indTV(1);
  TypedValue* to = nullptr;
  lookupd_gbl(vmfp(), name, nameTV, to);
  SCOPE_EXIT { decRefStr(name); };
  assertx(to != nullptr);
  tvBind(*fr, *to);
  // Teleport the Ref into the name slot, then drop the duplicate top.
  memcpy((void*)nameTV, (void*)fr, sizeof(TypedValue));
  vmStack().discard();
}
// BindS: bind a Ref into a static property. Binding is incompatible with
// enforceable type hints, so anything but an unresolved-mixed hint raises a
// binding error when hint checking is on.
OPTBLD_INLINE void iopBindS(clsref_slot cslot) {
  TypedValue* fr = vmStack().topTV();
  Class* cls = cslot.take().second;
  TypedValue* propn = vmStack().indTV(1);
  TypedValue* output = propn;
  StringData* name;
  TypedValue* val;
  bool visible, accessible;
  Slot slot;
  lookup_sprop(vmfp(), cls, name, propn, val, slot, visible, accessible, false);
  SCOPE_EXIT { decRefStr(name); };
  if (!(visible && accessible)) {
    raise_error("Invalid static property access: %s::%s",
                cls->name()->data(),
                name->data());
  }

  if (RuntimeOption::EvalCheckPropTypeHints > 0) {
    auto const& sprop = cls->staticProperties()[slot];
    auto const& tc = sprop.typeConstraint;
    if (!tc.isMixedResolved()) {
      raise_property_typehint_binding_error(
        sprop.cls,
        sprop.name,
        true,
        tc.isSoft()
      );
    }
  }

  tvBind(*fr, *val);
  tvDecRefGen(propn);
  memcpy(output, fr, sizeof(TypedValue));
  vmStack().ndiscard(1);
}
// UnsetL: unset() a local (write Uninit, releasing the old value).
OPTBLD_INLINE void iopUnsetL(local_var loc) {
  tvUnset(*loc.ptr);
}
// UnsetG: unset() the global named by the cell on top of the stack.
OPTBLD_INLINE void iopUnsetG() {
  TypedValue* tv1 = vmStack().topTV();
  StringData* name = lookup_name(tv1);
  SCOPE_EXIT { decRefStr(name); };
  VarEnv* varEnv = g_context->m_globalVarEnv;
  assertx(varEnv != nullptr);
  varEnv->unset(name);
  vmStack().popC();
}
// Allocate and minimally initialize an ActRec for a function call: func,
// arg count, trashed varenv, and optional reified generics. this/class
// setup is left to the caller.
OPTBLD_INLINE ActRec* fPushFuncImpl(
  const Func* func, int numArgs, ArrayData* reifiedGenerics
) {
  DEBUGGER_IF(phpBreakpointEnabled(func->name()->data()));
  ActRec* ar = vmStack().allocA();
  ar->m_func = func;
  ar->initNumArgs(numArgs);
  ar->trashVarEnv();
  if (reifiedGenerics != nullptr) ar->setReifiedGenerics(reifiedGenerics);
  return ar;
}
4891 ALWAYS_INLINE std::string concat_arg_list(imm_array<uint32_t> args) {
4892 auto const n = args.size;
4893 assertx(n != 0);
4894 std::string ret;
4895 folly::toAppend(args[0], &ret);
4896 for (int i = 1; i != n; ++i) folly::toAppend(";", args[i], &ret);
4897 return ret;
// ResolveFunc: resolve a function by litstr id (autoloading if needed) and
// push it as a first-class Func value; errors if resolution fails.
OPTBLD_INLINE void iopResolveFunc(Id id) {
  auto unit = vmfp()->m_func->unit();
  auto const nep = unit->lookupNamedEntityPairId(id);
  auto func = Unit::loadFunc(nep.second, nep.first);
  if (func == nullptr) raise_resolve_undefined(unit->lookupLitstrId(id));
  vmStack().pushFunc(func);
}
// FPushFunc: push an ActRec for a call through a dynamic callee on top of
// the stack. Dispatches on the callee's type: object (closure/functor via
// __invoke), array-like or string callable, first-class Func, or ClsMeth.
// When `args` is non-empty the call has inout parameters and the looked-up
// name is mangled with a "$...$inout" suffix.
OPTBLD_INLINE void iopFPushFunc(uint32_t numArgs, imm_array<uint32_t> args) {
  auto const n = args.size;
  std::string arglist;
  if (UNLIKELY(n)) {
    arglist = concat_arg_list(args);
  }

  Cell* c1 = vmStack().topC();
  if (c1->m_type == KindOfObject) {
    // this covers both closures and functors
    static StringData* invokeName = makeStaticString("__invoke");
    ObjectData* origObj = c1->m_data.pobj;
    const Class* cls = origObj->getVMClass();
    auto const func = LIKELY(!n)
      ? cls->lookupMethod(invokeName)
      : cls->lookupMethod(
        makeStaticString(folly::sformat("__invoke${}$inout", arglist))
      );
    if (func == nullptr) {
      raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
    }

    vmStack().discard();
    ActRec* ar = fPushFuncImpl(func, numArgs, nullptr);
    if (func->isStaticInPrologue()) {
      ar->setClass(origObj->getVMClass());
      decRefObj(origObj);
    } else {
      ar->setThis(origObj);
      // Teleport the reference from the destroyed stack cell to the
      // ActRec. Don't try this at home.
    }
    return;
  }

  auto appendSuffix = [&] (const StringData* s) {
    return StringData::Make(s, folly::sformat("${}$inout", arglist));
  };

  if (isArrayLikeType(c1->m_type) || isStringType(c1->m_type)) {
    Variant v = Variant::wrap(*c1);

    // Rebuild [obj/cls, method] with the inout-mangled method name.
    auto wrapInOutName = [&] (Cell* c, const StringData* mth) {
      VArrayInit ai{2};
      ai.append(c->m_data.parr->at(int64_t(0)));
      ai.append(Variant::attach(appendSuffix(mth)));
      return ai.toVariant();
    };

    // Handle inout name mangling
    if (UNLIKELY(n)) {
      if (isStringType(c1->m_type)) {
        v = Variant::attach(appendSuffix(c1->m_data.pstr));
      } else if (c1->m_data.parr->size() == 2){
        auto s = c1->m_data.parr->at(1);
        if (isStringType(s.m_type)) {
          v = wrapInOutName(c1, s.m_data.pstr);
        } else if (isFuncType(s.m_type)) {
          v = wrapInOutName(c1, s.m_data.pfunc->fullDisplayName());
        }
      }
    }

    // support:
    //   array($instance, 'method')
    //   array('Class', 'method'),
    //   vec[$instance, 'method'],
    //   vec['Class', 'method'],
    //   array(Class*, Func*),
    //   array(ObjectData*, Func*),
    //   Func*,
    //   'func_name'
    //   'class::method'
    // which are all valid callables
    auto origCell = *c1;
    ObjectData* thiz = nullptr;
    HPHP::Class* cls = nullptr;
    StringData* invName = nullptr;
    bool dynamic = false;
    ArrayData* reifiedGenerics = nullptr;

    auto const func = vm_decode_function(
      v,
      vmfp(),
      thiz,
      cls,
      invName,
      dynamic,
      reifiedGenerics,
      DecodeFlags::NoWarn
    );
    assertx(dynamic);
    if (func == nullptr) {
      if (isArrayLikeType(origCell.m_type)) {
        raise_error("Invalid callable (array)");
      } else {
        assertx(isStringType(origCell.m_type));
        raise_call_to_undefined(origCell.m_data.pstr);
      }
    }

    vmStack().discard();
    auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
    if (thiz) {
      thiz->incRefCount();
      ar->setThis(thiz);
    } else if (cls) {
      ar->setClass(cls);
    } else {
      ar->trashThis();
    }

    ar->setDynamicCall();

    if (UNLIKELY(invName != nullptr)) {
      ar->setMagicDispatch(invName);
    }
    // Release the original callable cell (it was discarded, not popped).
    if (isArrayLikeType(origCell.m_type)) {
      decRefArr(origCell.m_data.parr);
    } else if (origCell.m_type == KindOfString) {
      decRefStr(origCell.m_data.pstr);
    }
    return;
  }

  if (c1->m_type == KindOfFunc) {
    const Func* func = c1->m_data.pfunc;
    assertx(func != nullptr);
    ArrayData* reifiedGenerics = nullptr;

    // Handle inout name mangling
    if (UNLIKELY(n)) {
      auto const func_name = c1->m_data.pfunc->fullDisplayName();
      auto const v = Variant::attach(appendSuffix(func_name));
      ObjectData* thiz = nullptr;
      Class* cls = nullptr;
      StringData* invName = nullptr;
      bool dynamic = false;
      func = vm_decode_function(
        v,
        vmfp(),
        thiz,
        cls,
        invName,
        dynamic,
        reifiedGenerics,
        DecodeFlags::NoWarn
      );
      if (func == nullptr) raise_call_to_undefined(func_name);
    }
    vmStack().discard();
    auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
    ar->trashThis();
    return;
  }

  if (isClsMethType(c1->m_type)) {
    auto const clsMeth = c1->m_data.pclsmeth;
    assertx(clsMeth->getCls());
    assertx(clsMeth->getFunc());

    ArrayData* reifiedGenerics = nullptr;
    const Func* func = clsMeth->getFunc();
    ObjectData* thiz = nullptr;
    Class* cls = clsMeth->getCls();

    // Handle inout name mangling
    if (UNLIKELY(n)) {
      auto const func_name = func->fullDisplayName();
      auto const v = Variant::attach(appendSuffix(func_name));
      bool dynamic = false;
      StringData* invName = nullptr;
      func = vm_decode_function(
        v,
        vmfp(),
        thiz,
        cls,
        invName,
        dynamic,
        reifiedGenerics,
        DecodeFlags::NoWarn
      );
      if (func == nullptr) raise_call_to_undefined(func_name);
    }
    vmStack().popC();
    auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
    if (thiz) {
      ar->setThis(thiz);
    } else if (cls) {
      ar->setClass(cls);
    } else {
      ar->trashThis();
    }

    ar->setDynamicCall();
    return;
  }

  raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
}
// FPushFuncD: push an ActRec for a direct (statically-named) function call;
// errors if the function cannot be loaded.
OPTBLD_FLT_INLINE void iopFPushFuncD(uint32_t numArgs, Id id) {
  const NamedEntityPair nep =
    vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
  Func* func = Unit::loadFunc(nep.second, nep.first);
  if (func == nullptr) {
    raise_call_to_undefined(vmfp()->m_func->unit()->lookupLitstrId(id));
  }
  ActRec* ar = fPushFuncImpl(func, numArgs, nullptr);
  ar->trashThis();
}
// FPushFuncU: namespaced-function call with fallback — try the in-namespace
// name first, then (with a notice) the global name; error if neither loads.
OPTBLD_INLINE void iopFPushFuncU(uint32_t numArgs, Id nsFunc, Id globalFunc) {
  Unit* unit = vmfp()->m_func->unit();
  const NamedEntityPair nep = unit->lookupNamedEntityPairId(nsFunc);
  Func* func = Unit::loadFunc(nep.second, nep.first);
  if (func == nullptr) {
    const NamedEntityPair nep2 = unit->lookupNamedEntityPairId(globalFunc);
    raise_undefined_function_fallback_notice(nep.first, nep2.first);
    func = Unit::loadFunc(nep2.second, nep2.first);
    if (func == nullptr) {
      raise_call_to_undefined(unit->lookupLitstrId(nsFunc));
    }
  }
  ActRec* ar = fPushFuncImpl(func, numArgs, nullptr);
  ar->trashThis();
}
// Set up an ActRec for an instance-method call on `obj`. Takes ownership of
// both `obj` and `name`: obj's reference moves into the ActRec (or is
// released for static methods), and name is released unless kept for magic
// dispatch.
void fPushObjMethodImpl(StringData* name,
                        ObjectData* obj,
                        int numArgs,
                        bool dynamic) {
  const Func* f;
  LookupResult res;
  auto cls = obj->getVMClass();
  try {
    res = lookupObjMethod(
      f, cls, name, arGetContextClass(vmfp()), true);
  } catch (...) {
    // Lookup may throw (e.g. visibility errors); don't leak obj's reference.
    decRefObj(obj);
    throw;
  }
  assertx(f);
  ActRec* ar = vmStack().allocA();
  ar->m_func = f;
  if (res == LookupResult::MethodFoundNoThis) {
    // Static method reached through an instance: drop the instance and bind
    // the class instead.
    decRefObj(obj);
    ar->setClass(cls);

    if (RuntimeOption::EvalNoticeOnBadMethodStaticness) {
      raise_notice(
        "Static method %s should not be called on instance",
        ar->func()->fullName()->data()
      );
    }
  } else {
    assertx(res == LookupResult::MethodFoundWithThis ||
            res == LookupResult::MagicCallFound);
    /* Transfer ownership of obj to the ActRec*/
    ar->setThis(obj);
  }

  ar->initNumArgs(numArgs);
  if (dynamic) ar->setDynamicCall();
  if (res == LookupResult::MagicCallFound) {
    ar->setMagicDispatch(name);
  } else {
    ar->trashVarEnv();
    // NOTE(review): name is released here but still read below when the
    // callee has reified generics — presumably reified names are static
    // strings so this is safe; confirm.
    decRefStr(name);
  }

  if (f->hasReifiedGenerics()) {
    assertx(isReifiedName(name));
    auto const reifiedGenerics =
      getReifiedTypeList(stripClsOrFnNameFromReifiedName(name));
    ar->setReifiedGenerics(reifiedGenerics);
  }
}
5185 void fPushNullObjMethod(int numArgs) {
5186 assertx(SystemLib::s_nullFunc);
5187 ActRec* ar = vmStack().allocA();
5188 ar->m_func = SystemLib::s_nullFunc;
5189 ar->trashThis();
5190 ar->initNumArgs(numArgs);
5191 ar->trashVarEnv();
5192 ar->setDynamicCall();
5195 static void raise_resolve_non_object(const char* methodName,
5196 const char* typeName = nullptr) {
5197 auto const msg = folly::sformat(
5198 "Cannot resolve a member function {}() on a non-object ({})",
5199 methodName, typeName
5202 raise_fatal_error(msg.c_str());
5205 static void throw_call_non_object(const char* methodName,
5206 const char* typeName = nullptr) {
5207 std::string msg;
5208 folly::format(&msg, "Call to a member function {}() on a non-object ({})",
5209 methodName, typeName);
5211 if (RuntimeOption::ThrowExceptionOnBadMethodCall) {
5212 SystemLib::throwBadMethodCallExceptionObject(String(msg));
5214 raise_fatal_error(msg.c_str());
5217 ALWAYS_INLINE StringData* mangleInOutName(
5218 const StringData* name,
5219 imm_array<uint32_t> args
5221 return
5222 StringData::Make(
5223 name, folly::sformat("${}$inout", concat_arg_list(args))
// FPushObjMethod: dynamic method call — method name on top of the stack,
// receiver beneath. Null receivers dispatch to the null function under
// NullSafe semantics; otherwise non-objects throw.
OPTBLD_INLINE void iopFPushObjMethod(uint32_t numArgs, ObjMethodOp op,
                                     imm_array<uint32_t> args) {
  Cell* c1 = vmStack().topC(); // Method name.
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
  }
  Cell* c2 = vmStack().indC(1); // Object.
  if (c2->m_type != KindOfObject) {
    if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(c2->m_type))) {
      throw_call_non_object(c1->m_data.pstr->data(),
                            getDataTypeString(c2->m_type).get()->data());
    }
    vmStack().popC();
    vmStack().popC();
    fPushNullObjMethod(numArgs);
    return;
  }
  ObjectData* obj = c2->m_data.pobj;
  StringData* name = c1->m_data.pstr;

  if (UNLIKELY(args.size)) {
    // `s` adopts the original name's reference so it is released when this
    // scope ends; `name` is rebound to the freshly-made mangled string.
    String s = String::attach(name);
    name = mangleInOutName(name, args);
  }

  // We handle decReffing obj and name in fPushObjMethodImpl
  vmStack().ndiscard(2);
  fPushObjMethodImpl(name, obj, numArgs, true);
}
// FPushObjMethodD: method call with a statically-known name; receiver on
// top of the stack. Same null/non-object handling as FPushObjMethod.
OPTBLD_INLINE void
iopFPushObjMethodD(uint32_t numArgs, const StringData* name, ObjMethodOp op) {
  Cell* c1 = vmStack().topC();
  if (c1->m_type != KindOfObject) {
    if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(c1->m_type))) {
      throw_call_non_object(name->data(),
                            getDataTypeString(c1->m_type).get()->data());
    }
    vmStack().popC();
    fPushNullObjMethod(numArgs);
    return;
  }
  ObjectData* obj = c1->m_data.pobj;
  // We handle decReffing obj in fPushObjMethodImpl
  vmStack().discard();
  fPushObjMethodImpl(const_cast<StringData*>(name), obj, numArgs, false);
}
5276 namespace {
// Shared body of ResolveObjMethod (inst_meth): decode [receiver, name] into
// a concrete [obj-or-class, Func] pair and push it as a varray/vec.
// c1 = method name cell (top), c2 = receiver cell (beneath).
void resolveMethodImpl(Cell* c1, Cell* c2) {
  auto name = c1->m_data.pstr;
  ObjectData* thiz = nullptr;
  HPHP::Class* cls = nullptr;
  StringData* invName = nullptr;
  bool dynamic = false;
  ArrayData* reifiedGenerics = nullptr;
  auto arr = make_varray(cellAsVariant(*c2), cellAsVariant(*c1));
  auto const func = vm_decode_function(
    Variant{arr},
    vmfp(),
    thiz,
    cls,
    invName,
    dynamic,
    reifiedGenerics,
    DecodeFlags::NoWarn
  );
  assertx(dynamic);
  if (!func) raise_error("Failure to resolve method name \'%s\'", name->data());
  if (invName) {
    // Magic (__call) targets cannot be represented as a resolved pair.
    SystemLib::throwInvalidOperationExceptionObject(
      "Unable to resolve magic call for inst_meth()");
  }
  if (thiz) {
    assertx(isObjectType(type(c2)));
    assertx(!(func->attrs() & AttrStatic));
    assertx(val(c2).pobj == thiz);
  } else {
    assertx(cls);
    assertx(func->attrs() & AttrStatic);
    // Replace the receiver entry with the resolved class.
    arr.set(0, Variant{cls});
  }
  // Replace the name entry with the resolved Func.
  arr.set(1, Variant{func});
  vmStack().popC();
  vmStack().popC();
  if (RuntimeOption::EvalHackArrDVArrs) {
    vmStack().pushVecNoRc(arr.detach());
  } else {
    vmStack().pushArrayNoRc(arr.detach());
  }
}
// ResolveClsMethod (class_meth): resolve a "ClassName"/"methName" string
// pair (method name on top, class name beneath) into a ClsMeth value.
// All failure paths raise before anything is popped, so the string cells
// are still owned by the stack (and unwound normally) on error; only after
// a successful decode are both cells popped and the ClsMeth pushed.
OPTBLD_INLINE void iopResolveClsMethod() {
  Cell* func = vmStack().topC();
  Cell* cls = vmStack().indC(1);
  if (!isStringType(func->m_type) || !isStringType(cls->m_type)) {
    raise_error(!isStringType(func->m_type) ?
      Strings::METHOD_NAME_MUST_BE_STRING : "class name must be a string.");
  }

  StringData* invName = nullptr;
  auto const decoded_func = decode_for_clsmeth(
    StrNR{val(cls).pstr}, StrNR{val(func).pstr}, vmfp(), invName,
    DecodeFlags::NoWarn);
  if (!decoded_func.first || !decoded_func.second) {
    if (!decoded_func.first) {
      raise_error("Failure to resolve class name \'%s\'",
                  val(cls).pstr->data());
    } else {
      raise_error("Failure to resolve method name \'%s\'",
                  val(func).pstr->data());
    }
  }
  if (invName) {
    // A magic (__callStatic) target has no concrete Func to capture.
    SystemLib::throwInvalidOperationExceptionObject(
      "Unable to resolve magic call for class_meth()");
  }

  ClsMethDataRef clsMeth =
    ClsMethDataRef::create(decoded_func.first, decoded_func.second);
  vmStack().popC();
  vmStack().popC();
  // NoRc: create() hands us the reference the stack cell will own.
  vmStack().pushClsMethNoRc(clsMeth);
}
// ResolveObjMethod (inst_meth): validate the name/receiver cells, then
// delegate to resolveMethodImpl to build the [receiver, Func] pair.
OPTBLD_INLINE void iopResolveObjMethod() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
  }
  auto name = c1->m_data.pstr;
  if (!isObjectType(c2->m_type)) {
    raise_resolve_non_object(name->data(),
                             getDataTypeString(c2->m_type).get()->data());
  }
  resolveMethodImpl(c1, c2);
}
5368 namespace {
/*
 * Shared implementation for the FPushClsMethod* opcodes: look up `name` on
 * `cls`, then allocate and initialize the ActRec for the upcoming call.
 *
 * Ownership: callers transfer their reference on `name`; it is released in
 * the non-magic path below (magic dispatch keeps it alive in the ActRec).
 * `forwarding` propagates the caller's late-bound class for self::/parent::.
 * `dynamic` marks the ActRec so callee-side dynamic-call checks fire.
 */
void pushClsMethodImpl(Class* cls,
                       StringData* name,
                       int numArgs,
                       bool forwarding,
                       bool dynamic) {
  auto const ctx = liveClass();
  // Only bind $this when the lookup context actually has one.
  auto obj = ctx && vmfp()->hasThis() ? vmfp()->getThis() : nullptr;
  const Func* f;
  auto const res = lookupClsMethod(f, cls, name, obj, ctx, true);
  if (res == LookupResult::MethodFoundNoThis ||
      res == LookupResult::MagicCallStaticFound) {
    if (!f->isStaticInPrologue()) {
      raise_missing_this(f);
    }
    obj = nullptr;
  } else {
    assertx(obj);
    assertx(res == LookupResult::MethodFoundWithThis ||
            res == LookupResult::MagicCallFound);
    // The ActRec takes a reference to $this.
    obj->incRefCount();
  }
  assertx(f);
  ActRec* ar = vmStack().allocA();
  ar->m_func = f;
  if (obj) {
    ar->setThis(obj);
  } else {
    if (forwarding && ctx) {
      /* Propagate the current late bound class if there is one, */
      /* otherwise use the class given by this instruction's input */
      if (vmfp()->hasThis()) {
        cls = vmfp()->getThis()->getVMClass();
      } else {
        cls = vmfp()->getClass();
      }
    }
    ar->setClass(cls);
  }
  ar->initNumArgs(numArgs);
  if (dynamic) ar->setDynamicCall();
  if (res == LookupResult::MagicCallFound ||
      res == LookupResult::MagicCallStaticFound) {
    // __call/__callStatic dispatch needs the original method name.
    ar->setMagicDispatch(name);
  } else {
    ar->trashVarEnv();
    decRefStr(const_cast<StringData*>(name));
  }
  // NOTE(review): in the non-magic branch `name` was decRef'd just above but
  // is read again here; this looks safe only because reified names appear to
  // be uncounted/static strings — confirm.
  if (f->hasReifiedGenerics()) {
    assertx(isReifiedName(name));
    auto const reifiedGenerics =
      getReifiedTypeList(stripClsOrFnNameFromReifiedName(name));
    ar->setReifiedGenerics(reifiedGenerics);
  }
}
5426 Class* specialClsRefToCls(SpecialClsRef ref) {
5427 switch (ref) {
5428 case SpecialClsRef::Static:
5429 if (auto const cls = frameStaticClass(vmfp())) return cls;
5430 raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
5431 case SpecialClsRef::Self:
5432 if (auto const cls = arGetContextClass(vmfp())) return cls;
5433 raise_error(HPHP::Strings::CANT_ACCESS_SELF);
5434 case SpecialClsRef::Parent:
5435 if (auto const cls = arGetContextClass(vmfp())) {
5436 if (auto const parent = cls->parent()) return parent;
5437 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
5439 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
5441 always_assert(false);
/*
 * FPushClsMethod: class comes from the clsref slot, method name from the
 * stack. Always treated as a dynamic call.
 */
OPTBLD_INLINE void iopFPushClsMethod(uint32_t numArgs, clsref_slot slot,
                                     imm_array<uint32_t> args) {
  auto const c1 = vmStack().topC(); // Method name.
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
  }
  auto const cls = slot.take().second;
  auto name = c1->m_data.pstr;

  if (UNLIKELY(args.size)) {
    // `s` adopts the old name's reference (released at scope exit); the
    // mangled inout name carries its own reference.
    String s = String::attach(name);
    name = mangleInOutName(name, args);
  }

  // pushClsMethodImpl will take care of decReffing name
  vmStack().ndiscard(1);
  assertx(cls && name);
  pushClsMethodImpl(cls, name, numArgs, false, true);
}
5466 OPTBLD_INLINE
5467 void iopFPushClsMethodD(uint32_t numArgs, const StringData* name, Id classId) {
5468 const NamedEntityPair &nep =
5469 vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
5470 Class* cls = Unit::loadClass(nep.second, nep.first);
5471 if (cls == nullptr) {
5472 raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
5474 pushClsMethodImpl(cls, const_cast<StringData*>(name), numArgs, false, false);
/*
 * FPushClsMethodS: class comes from a special ref (static::/self::/parent::),
 * method name from the stack. self/parent forward the late-bound class.
 */
OPTBLD_INLINE void iopFPushClsMethodS(uint32_t numArgs, SpecialClsRef ref,
                                      imm_array<uint32_t> args) {
  auto const c1 = vmStack().topC(); // Method name.
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
  }
  auto const cls = specialClsRefToCls(ref);
  auto name = c1->m_data.pstr;

  if (UNLIKELY(args.size)) {
    // `s` adopts the old name's reference (released at scope exit); the
    // mangled inout name carries its own reference.
    String s = String::attach(name);
    name = mangleInOutName(name, args);
  }

  // pushClsMethodImpl will take care of decReffing name
  vmStack().ndiscard(1);
  pushClsMethodImpl(
    cls,
    name,
    numArgs,
    ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent,
    true
  );
}
5502 OPTBLD_INLINE void iopFPushClsMethodSD(uint32_t numArgs,
5503 SpecialClsRef ref,
5504 const StringData* name) {
5505 pushClsMethodImpl(
5506 specialClsRefToCls(ref),
5507 const_cast<StringData*>(name),
5508 numArgs,
5509 ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent,
5510 false
5514 namespace {
5516 void newObjImpl(Class* cls, ArrayData* reified_types) {
5517 // Replace input with uninitialized instance.
5518 auto this_ = !reified_types ? newInstance(cls)
5519 : newInstanceReified(cls, reified_types);
5520 TRACE(2, "NewObj: just new'ed an instance of class %s: %p\n",
5521 cls->name()->data(), this_);
5522 vmStack().pushObjectNoRc(this_);
5527 OPTBLD_INLINE void iopNewObj(clsref_slot slot, HasGenericsOp op) {
5528 auto cls_ref = slot.take();
5529 callerDynamicConstructChecks(cls_ref.second);
5530 auto const reified_types =
5531 HasGenericsOp::NoGenerics != op ? cls_ref.first : nullptr;
5532 newObjImpl(cls_ref.second, reified_types);
5535 OPTBLD_INLINE void iopNewObjD(Id id) {
5536 const NamedEntityPair &nep =
5537 vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
5538 auto cls = Unit::loadClass(nep.second, nep.first);
5539 if (cls == nullptr) {
5540 raise_error(Strings::UNKNOWN_CLASS,
5541 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
5543 newObjImpl(cls, nullptr);
5546 OPTBLD_INLINE void iopNewObjS(SpecialClsRef ref) {
5547 auto const cls = specialClsRefToCls(ref);
5548 if (ref == SpecialClsRef::Static && cls->hasReifiedGenerics()) {
5549 raise_error(Strings::NEW_STATIC_ON_REIFIED_CLASS, cls->name()->data());
5551 auto const reified_generics = cls->hasReifiedGenerics()
5552 ? getClsReifiedGenericsProp(cls, vmfp()) : nullptr;
5553 newObjImpl(cls, reified_generics);
/*
 * FPushCtor: the uninitialized object is on the stack; look up its
 * constructor and build the ActRec, transferring ownership of the object.
 */
OPTBLD_INLINE void iopFPushCtor(uint32_t numArgs) {
  assertx(tvIsObject(vmStack().topC()));
  auto const obj = vmStack().topC()->m_data.pobj;

  const Func* func;
  auto const ctx = arGetContextClass(vmfp());
  auto const res UNUSED = lookupCtorMethod(func, obj->getVMClass(), ctx, true);
  assertx(res == LookupResult::MethodFoundWithThis);

  // Pop the object (ownership to be transferred to the ActRec).
  vmStack().discard();

  // Push new activation record.
  auto ar = vmStack().allocA();
  ar->m_func = func;
  ar->setThis(obj);
  ar->initNumArgs(numArgs);
  ar->trashVarEnv();
}
/*
 * Common FCall entry: optionally spread an unpack container onto the stack,
 * then enter the callee's prologue. Returns false when the call was aborted
 * (invalid unpack args, or an intercept/event hook short-circuited it), in
 * which case the return value has already been pushed.
 */
bool doFCall(ActRec* ar, uint32_t numArgs, bool unpack) {
  TRACE(3, "FCall: pc %p func %p base %d\n", vmpc(),
        vmfp()->unit()->entry(),
        int(vmfp()->func()->base()));

  if (unpack) {
    Cell* c1 = vmStack().topC();
    if (UNLIKELY(!isContainer(*c1))) {
      Cell tmp = *c1;
      // argument_unpacking RFC dictates "containers and Traversables"
      raise_warning_unsampled("Only containers may be unpacked");
      // Substitute an empty varray so the call can proceed.
      *c1 = make_persistent_array_like_tv(staticEmptyVArray());
      tvDecRefGen(&tmp);
    }

    Cell args = *c1;
    vmStack().discard(); // prepareArrayArgs will push arguments onto the stack
    SCOPE_EXIT { tvDecRefGen(&args); };
    checkStack(vmStack(), ar->func(), 0);

    assertx(!ar->resumed());
    auto prepResult = prepareArrayArgs(ar, args, vmStack(), numArgs,
                                       nullptr, /* check ref annot */ true);
    if (UNLIKELY(!prepResult)) {
      vmStack().pushNull(); // return value is null if args are invalid
      return false;
    }
  }

  prepareFuncEntry(
    ar,
    unpack ? StackArgsState::Trimmed : StackArgsState::Untrimmed);
  // The function-call event hook may intercept and complete the call itself.
  if (UNLIKELY(!EventHook::FunctionCall(ar, EventHook::NormalFunc))) {
    return false;
  }
  checkForReifiedGenericsErrors(ar);
  calleeDynamicCallChecks(ar);
  checkForRequiredCallM(ar);
  return true;
}
5617 namespace {
5619 // Find the AR for the current FPI region by indexing from sp
5620 inline ActRec* arFromSp(int32_t n) {
5621 return reinterpret_cast<ActRec*>(vmStack().top() + n);
/*
 * Entry point used by translated code for FCall-with-unpack: transition the
 * VM register state, wire up the return addresses, and delegate to doFCall.
 * The last of numArgsInclUnpack stack args is the unpack container.
 */
bool doFCallUnpackTC(PC origpc, int32_t numArgsInclUnpack, void* retAddr) {
  assert_native_stack_aligned();
  assertx(tl_regState == VMRegState::DIRTY);
  tl_regState = VMRegState::CLEAN;
  auto const ar = arFromSp(numArgsInclUnpack);
  assertx(ar->numArgs() == numArgsInclUnpack);
  ar->setReturn(vmfp(), origpc, jit::tc::ustubs().retHelper);
  ar->setJitReturn(retAddr);
  auto const ret = doFCall(ar, numArgsInclUnpack - 1, true);
  tl_regState = VMRegState::DIRTY;
  return ret;
}
/*
 * FCall: run caller-side checks against the already-materialized ActRec,
 * mark multi-return / async-eager-return variants, then enter the callee.
 * The name immediates exist only for sanity-checking the prepared frame.
 */
OPTBLD_FLT_INLINE
void iopFCall(PC origpc, PC& pc, FCallArgs fca,
              const StringData* /*clsName*/, const StringData* funcName) {
  auto const ar = arFromSp(fca.numArgs + (fca.hasUnpack() ? 1 : 0));
  auto const func = ar->func();
  assertx(
    funcName->empty() ||
    RuntimeOption::EvalJitEnableRenameFunction ||
    (func->attrs() & AttrInterceptable) ||
    func->name()->isame(funcName) || (
      funcName == s_construct.get() &&
      func == func->cls()->getCtor()
    )
  );
  assertx(fca.numArgs + (fca.hasUnpack() ? 1 : 0) == ar->numArgs());
  if (fca.enforceReffiness()) callerReffinessChecks(func, fca);
  if (ar->isDynamicCall()) callerDynamicCallChecks(func);
  if (rxEnforceCallsInLevel(vmfp()->rxMinLevel())) {
    callerRxChecks(vmfp(), func);
  }
  checkStack(vmStack(), func, 0);
  if (fca.numRets != 1) ar->setFCallM();
  auto const asyncEagerReturn =
    fca.asyncEagerOffset != kInvalidOffset && func->supportsAsyncEagerReturn();
  if (asyncEagerReturn) ar->setAsyncEagerReturn();
  ar->setReturn(vmfp(), origpc, jit::tc::ustubs().retHelper);
  doFCall(ar, fca.numArgs, fca.hasUnpack());
  pc = vmpc();
}
/*
 * FCallBuiltin: invoke a native builtin directly, without building a PHP
 * ActRec. Args sit on the VM stack; they are coerced in place, the native
 * implementation is called, and the args are replaced by the return value.
 */
OPTBLD_FLT_INLINE
void iopFCallBuiltin(uint32_t numArgs, uint32_t numNonDefault, Id id) {
  const NamedEntity* ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
  Func* func = Unit::lookupFunc(ne);
  if (func == nullptr) {
    raise_error("Call to undefined function %s()",
                vmfp()->m_func->unit()->lookupLitstrId(id)->data());
  }

  if (rxEnforceCallsInLevel(vmfp()->rxMinLevel())) {
    callerRxChecks(vmfp(), func);
  }

  // args[0] is the last argument pushed; the first argument lives deepest.
  TypedValue* args = vmStack().indTV(numArgs-1);
  TypedValue ret;
  Native::coerceFCallArgs(args, numArgs, numNonDefault, func);

  if (func->hasVariadicCaptureParam()) {
    assertx(numArgs > 0);
    assertx(
      RuntimeOption::EvalHackArrDVArrs
        ? isVecType(args[1 - safe_cast<int32_t>(numArgs)].m_type)
        : isArrayType(args[1 - safe_cast<int32_t>(numArgs)].m_type)
    );
  }
  Native::callFunc<true>(func, nullptr, args, numNonDefault, ret);

  frame_free_args(args, numNonDefault);
  vmStack().ndiscard(numArgs);
  tvCopy(ret, *vmStack().allocTV());
}
5701 namespace {
5703 template <bool Local, bool Pop>
5704 bool initIterator(PC& pc, PC targetpc, Iter* it, Cell* c1) {
5705 auto const hasElems = it->init<Local>(c1);
5706 if (!hasElems) pc = targetpc;
5707 if (Pop) vmStack().popC();
5708 return hasElems;
// IterInit: iterate the popped stack cell; store the first value into `val`
// or branch to targetpc when the base is empty.
OPTBLD_INLINE void iopIterInit(PC& pc, Iter* it, PC targetpc, local_var val) {
  Cell* c1 = vmStack().topC();
  if (initIterator<false, true>(pc, targetpc, it, c1)) {
    tvAsVariant(val.ptr) = it->arr().second();
  }
}
// IterInitK: like IterInit but also stores the first key into `key`.
OPTBLD_INLINE
void iopIterInitK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
  Cell* c1 = vmStack().topC();
  if (initIterator<false, true>(pc, targetpc, it, c1)) {
    tvAsVariant(val.ptr) = it->arr().second();
    tvAsVariant(key.ptr) = it->arr().first();
  }
}
// LIterInit: iterate a local in place. Array-like locals use the specialized
// non-refcounting local iterator; other bases fall back to a normal iterator.
OPTBLD_INLINE void iopLIterInit(PC& pc, Iter* it, local_var local,
                                PC targetpc, local_var val) {
  if (isArrayLikeType(local.ptr->m_type)) {
    if (initIterator<true, false>(pc, targetpc, it, tvAssertCell(local.ptr))) {
      tvAsVariant(val.ptr) = it->arr().secondLocal(local.ptr->m_data.parr);
    }
    return;
  }

  if (initIterator<false, false>(pc, targetpc, it, tvToCell(local.ptr))) {
    tvAsVariant(val.ptr) = it->arr().second();
  }
}
// LIterInitK: like LIterInit but also stores the first key into `key`.
OPTBLD_INLINE void iopLIterInitK(PC& pc, Iter* it, local_var local,
                                 PC targetpc, local_var val, local_var key) {
  if (isArrayLikeType(local.ptr->m_type)) {
    if (initIterator<true, false>(pc, targetpc, it, tvAssertCell(local.ptr))) {
      tvAsVariant(val.ptr) = it->arr().secondLocal(local.ptr->m_data.parr);
      tvAsVariant(key.ptr) = it->arr().firstLocal(local.ptr->m_data.parr);
    }
    return;
  }

  if (initIterator<false, false>(pc, targetpc, it, tvToCell(local.ptr))) {
    tvAsVariant(val.ptr) = it->arr().second();
    tvAsVariant(key.ptr) = it->arr().first();
  }
}
// IterNext: advance; when elements remain, jump back to targetpc with the
// next value in `val` (running the surprise check on the back-edge).
OPTBLD_INLINE void iopIterNext(PC& pc, Iter* it, PC targetpc, local_var val) {
  if (it->next()) {
    vmpc() = targetpc;
    jmpSurpriseCheck(targetpc - pc);
    pc = targetpc;
    tvAsVariant(val.ptr) = it->arr().second();
  }
}
// IterNextK: like IterNext but also refreshes the `key` local.
OPTBLD_INLINE
void iopIterNextK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
  if (it->next()) {
    vmpc() = targetpc;
    jmpSurpriseCheck(targetpc - pc);
    pc = targetpc;
    tvAsVariant(val.ptr) = it->arr().second();
    tvAsVariant(key.ptr) = it->arr().first();
  }
}
// LIterNext: mirror of LIterInit — array-like bases advance via the local
// iterator, anything else via the generic iterator.
OPTBLD_INLINE void iopLIterNext(PC& pc,
                                Iter* it,
                                local_var base,
                                PC targetpc,
                                local_var val) {
  if (isArrayLikeType(base.ptr->m_type)) {
    if (it->nextLocal(base.ptr->m_data.parr)) {
      vmpc() = targetpc;
      jmpSurpriseCheck(targetpc - pc);
      pc = targetpc;
      tvAsVariant(val.ptr) = it->arr().secondLocal(base.ptr->m_data.parr);
    }
  } else if (it->next()) {
    vmpc() = targetpc;
    jmpSurpriseCheck(targetpc - pc);
    pc = targetpc;
    tvAsVariant(val.ptr) = it->arr().second();
  }
}
// LIterNextK: like LIterNext but also refreshes the `key` local.
OPTBLD_INLINE void iopLIterNextK(PC& pc,
                                 Iter* it,
                                 local_var base,
                                 PC targetpc,
                                 local_var val,
                                 local_var key) {
  if (isArrayLikeType(base.ptr->m_type)) {
    if (it->nextLocal(base.ptr->m_data.parr)) {
      vmpc() = targetpc;
      jmpSurpriseCheck(targetpc - pc);
      pc = targetpc;
      tvAsVariant(val.ptr) = it->arr().secondLocal(base.ptr->m_data.parr);
      tvAsVariant(key.ptr) = it->arr().firstLocal(base.ptr->m_data.parr);
    }
  } else if (it->next()) {
    vmpc() = targetpc;
    jmpSurpriseCheck(targetpc - pc);
    pc = targetpc;
    tvAsVariant(val.ptr) = it->arr().second();
    tvAsVariant(key.ptr) = it->arr().first();
  }
}
// IterFree: release the iterator's reference to its base, if any.
OPTBLD_INLINE void iopIterFree(Iter* it) {
  it->free();
}
// LIterFree: free a local iterator; the base local itself is untouched.
OPTBLD_INLINE void iopLIterFree(Iter* it, local_var) {
  it->free();
}
/*
 * Shared implementation of include/require (and their _once / doc-root
 * variants). Pops the path cell, resolves it according to `flags`, and
 * either evaluates the unit's pseudo-main or pushes a bool result.
 */
OPTBLD_INLINE void inclOp(PC origpc, PC& pc, InclOpFlags flags,
                          const char* opName) {
  Cell* c1 = vmStack().topC();
  auto path = String::attach(prepareKey(*c1));
  bool initial;
  TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
        flags & InclOpFlags::Once ? "Once" : "",
        flags & InclOpFlags::DocRoot ? "DocRoot" : "",
        flags & InclOpFlags::Relative ? "Relative" : "",
        flags & InclOpFlags::Fatal ? "Fatal" : "",
        path.data());

  // Directory of the unit doing the include, for relative lookups.
  auto curUnitFilePath = [&] {
    namespace fs = boost::filesystem;
    fs::path currentUnit(vmfp()->m_func->unit()->filepath()->data());
    fs::path currentDir(currentUnit.branch_path());
    return currentDir.string();
  };

  auto const unit = [&] {
    if (flags & InclOpFlags::Relative) {
      String absPath = curUnitFilePath() + '/';
      absPath += path;
      return lookupUnit(absPath.get(), "", &initial,
                        Native::s_noNativeFuncs);
    }
    if (flags & InclOpFlags::DocRoot) {
      return lookupUnit(
        SourceRootInfo::RelativeToPhpRoot(path).get(), "", &initial,
        Native::s_noNativeFuncs);
    }
    return lookupUnit(path.get(), curUnitFilePath().c_str(), &initial,
                      Native::s_noNativeFuncs);
  }();

  vmStack().popC();
  if (unit == nullptr) {
    // require* fatals on a missing file; include* warns and pushes false.
    if (flags & InclOpFlags::Fatal) {
      raise_error("%s(%s): File not found", opName, path.data());
    } else {
      raise_warning("%s(%s): File not found", opName, path.data());
    }
    vmStack().pushBool(false);
    return;
  }

  // `initial` is false when a *_once variant already evaluated this unit.
  if (!(flags & InclOpFlags::Once) || initial) {
    g_context->evalUnit(unit, origpc, pc, EventHook::PseudoMain);
  } else {
    Stats::inc(Stats::PseudoMain_Guarded);
    vmStack().pushBool(true);
  }
}
// include $path
OPTBLD_INLINE void iopIncl(PC origpc, PC& pc) {
  inclOp(origpc, pc, InclOpFlags::Default, "include");
}
// include_once $path
OPTBLD_INLINE void iopInclOnce(PC origpc, PC& pc) {
  inclOp(origpc, pc, InclOpFlags::Once, "include_once");
}
// require $path — fatals if the file cannot be found.
OPTBLD_INLINE void iopReq(PC origpc, PC& pc) {
  inclOp(origpc, pc, InclOpFlags::Fatal, "require");
}
// require_once $path
OPTBLD_INLINE void iopReqOnce(PC origpc, PC& pc) {
  inclOp(origpc, pc, InclOpFlags::Fatal | InclOpFlags::Once, "require_once");
}
// ReqDoc: require_once resolved relative to the configured document root.
OPTBLD_INLINE void iopReqDoc(PC origpc, PC& pc) {
  inclOp(
    origpc,
    pc,
    InclOpFlags::Fatal | InclOpFlags::Once | InclOpFlags::DocRoot,
    "require_once"
  );
}
/*
 * Eval: compile and run the code string on top of the stack. Pushes false
 * (after logging) on a parse fatal; otherwise evaluates the new unit.
 * Disallowed entirely in RepoAuthoritative builds.
 */
OPTBLD_INLINE void iopEval(PC origpc, PC& pc) {
  Cell* c1 = vmStack().topC();

  if (UNLIKELY(RuntimeOption::EvalAuthoritativeMode)) {
    // Ahead of time whole program optimizations need to assume it can
    // see all the code, or it really can't do much.
    raise_error("You can't use eval in RepoAuthoritative mode");
  }

  auto code = String::attach(prepareKey(*c1));
  // Prepend the proper open tag so the compiler sees a complete file.
  String prefixedCode = concat(
    vmfp()->unit()->isHHFile() ? "<?hh " : "<?php ",
    code
  );

  // Synthesize a stable, md5-tagged filename for diagnostics and caching.
  auto evalFilename = std::string();
  auto vm = &*g_context;
  string_printf(
    evalFilename,
    "%s(%d)(%s" EVAL_FILENAME_SUFFIX,
    vm->getContainingFileName()->data(),
    vm->getLine(),
    string_md5(code.slice()).c_str()
  );
  Unit* unit = vm->compileEvalString(prefixedCode.get(), evalFilename.c_str());
  if (!RuntimeOption::EvalJitEvaledCode) {
    unit->setInterpretOnly();
  }
  const StringData* msg;
  int line = 0;

  vmStack().popC();
  if (unit->parseFatal(msg, line)) {
    auto const errnum = static_cast<int>(ErrorMode::WARNING);
    if (vm->errorNeedsLogging(errnum)) {
      // manual call to Logger instead of logError as we need to use
      // evalFileName and line as the exception doesn't track the eval()
      Logger::Error(
        "\nFatal error: %s in %s on line %d",
        msg->data(),
        evalFilename.c_str(),
        line
      );
    }
    vmStack().pushBool(false);
    return;
  }
  vm->evalUnit(unit, origpc, pc, EventHook::Eval);
}
5960 OPTBLD_INLINE void iopDefCls(uint32_t cid) {
5961 PreClass* c = vmfp()->m_func->unit()->lookupPreClassId(cid);
5962 Unit::defClass(c);
5965 OPTBLD_INLINE void iopDefRecord(uint32_t cid) {
5966 Record* r = vmfp()->m_func->unit()->lookupRecordId(cid);
5967 Unit::defRecord(r);
5970 OPTBLD_INLINE void iopAliasCls(const StringData* original,
5971 const StringData* alias) {
5972 TypedValue* aloadTV = vmStack().topTV();
5973 tvCastToBooleanInPlace(aloadTV);
5974 assertx(aloadTV->m_type == KindOfBoolean);
5975 bool autoload = aloadTV->m_data.num;
5976 vmStack().popX();
5978 vmStack().pushBool(Unit::aliasClass(original, alias, autoload));
// DefClsNop: placeholder emitted when the class was hoisted; nothing to do.
OPTBLD_INLINE void iopDefClsNop(uint32_t /*cid*/) {}
// DefTypeAlias: instantiate the type alias at index `tid` in this unit.
OPTBLD_INLINE void iopDefTypeAlias(uint32_t tid) {
  vmfp()->func()->unit()->defTypeAlias(tid);
}
5987 OPTBLD_INLINE void iopThis() {
5988 checkThis(vmfp());
5989 ObjectData* this_ = vmfp()->getThis();
5990 vmStack().pushObject(this_);
// BareThis: push $this when available, otherwise null — optionally raising
// a notice (or asserting) according to the BareThisOp immediate.
OPTBLD_INLINE void iopBareThis(BareThisOp bto) {
  if (vmfp()->func()->cls() && vmfp()->hasThis()) {
    ObjectData* this_ = vmfp()->getThis();
    vmStack().pushObject(this_);
  } else {
    vmStack().pushNull();
    switch (bto) {
    case BareThisOp::Notice: raise_notice(Strings::WARN_NULL_THIS); break;
    case BareThisOp::NoNotice: break;
    case BareThisOp::NeverNull:
      assertx(!"$this cannot be null in BareThis with NeverNull option");
      break;
    }
  }
}
// CheckThis: fatal if the current frame has no $this; pushes nothing.
OPTBLD_INLINE void iopCheckThis() {
  checkThis(vmfp());
}
// InitThisLoc: copy $this (when present) into the designated local after
// releasing the local's previous value; otherwise leave the local uninit.
OPTBLD_INLINE void iopInitThisLoc(local_var thisLoc) {
  tvDecRefGen(thisLoc.ptr);
  if (vmfp()->func()->cls() && vmfp()->hasThis()) {
    thisLoc->m_data.pobj = vmfp()->getThis();
    thisLoc->m_type = KindOfObject;
    tvIncRefCountable(*thisLoc.ptr);
  } else {
    tvWriteUninit(*thisLoc.ptr);
  }
}
6024 OPTBLD_INLINE void iopFuncNumArgs() {
6025 if (vmfp()->func()->isPseudoMain()) {
6026 raise_warning(
6027 "func_num_args(): Called from the global scope - no function context"
6029 vmStack().pushInt(-1);
6030 } else {
6031 vmStack().pushInt(vmfp()->numArgs());
// Locate the storage slot for static variable `name` in a closure body.
// Closure statics live on the closure object, which occupies the frame
// local immediately past the declared parameters.
static inline TypedValue* lookupClosureStatic(const StringData* name,
                                              const ActRec* fp) {
  auto const func = fp->m_func;

  assertx(func->isClosureBody());
  assertx(!func->hasVariadicCaptureParam());
  auto const obj = frame_local(fp, func->numParams())->m_data.pobj;

  return lookupStaticTvFromClosure(obj, name);
}
/*
 * StaticLocCheck: if the named static local is already initialized, bind it
 * (as a ref) into `loc` and push true; otherwise push false without
 * touching the local.
 */
OPTBLD_INLINE void iopStaticLocCheck(local_var loc, const StringData* var) {
  auto const func = vmfp()->m_func;

  auto ref = [&] () -> RefData* {
    if (UNLIKELY(func->isClosureBody())) {
      // Closure statics live on the closure object, not in RDS.
      auto const val = lookupClosureStatic(var, vmfp());
      if (val->m_type == KindOfUninit) {
        return nullptr;
      }
      assertx(isRefType(val->m_type));
      return val->m_data.pref;
    }

    auto const staticLocalData = rds::bindStaticLocal(func, var);
    if (!staticLocalData.isInit()) {
      return nullptr;
    }

    return &staticLocalData->ref;
  }();

  if (!ref) return vmStack().pushBool(false);

  auto const tmpTV = make_tv<KindOfRef>(ref);
  tvBind(tmpTV, *loc.ptr);
  vmStack().pushBool(true);
}
/*
 * StaticLocDef: (re)define the named static local with the value on top of
 * the stack, then bind it (as a ref) into `loc`. The initializer cell is
 * consumed.
 */
OPTBLD_INLINE void iopStaticLocDef(local_var loc, const StringData* var) {
  auto const func = vmfp()->m_func;
  auto const initVal = vmStack().topC();

  auto ref = [&] () -> RefData* {
    if (UNLIKELY(func->isClosureBody())) {
      // Closure statics are expected to be unset before a Def.
      auto const val = lookupClosureStatic(var, vmfp());
      assertx(val->m_type == KindOfUninit);
      cellCopy(*initVal, *val);
      tvBox(*val);
      return val->m_data.pref;
    }

    auto const staticLocalData = rds::bindStaticLocal(func, var);
    if (LIKELY(!staticLocalData.isInit())) {
      staticLocalData->ref.initInRDS();
      staticLocalData.markInit();
      cellCopy(*initVal, *staticLocalData->ref.cell());
    } else {
      // Already initialized: overwrite, releasing the old value.
      cellMove(*initVal, *staticLocalData->ref.cell());
    }
    return &staticLocalData->ref;
  }();

  auto const tmpTV = make_tv<KindOfRef>(ref);
  tvBind(tmpTV, *loc.ptr);
  vmStack().discard();
}
/*
 * StaticLocInit: like StaticLocDef, but the initializer is only applied the
 * first time — an already-initialized static local keeps its value.
 */
OPTBLD_INLINE void iopStaticLocInit(local_var loc, const StringData* var) {
  auto const func = vmfp()->m_func;
  auto const initVal = vmStack().topC();

  auto ref = [&] () -> RefData* {
    if (UNLIKELY(func->isClosureBody())) {
      auto const val = lookupClosureStatic(var, vmfp());
      if (val->m_type == KindOfUninit) {
        cellCopy(*initVal, *val);
        tvBox(*val);
      }
      return val->m_data.pref;
    }

    auto const staticLocalData = rds::bindStaticLocal(func, var);
    if (!staticLocalData.isInit()) {
      staticLocalData->ref.initInRDS();
      staticLocalData.markInit();
      cellCopy(*initVal, *staticLocalData->ref.cell());
    }
    return &staticLocalData->ref;
  }();

  auto const tmpTV = make_tv<KindOfRef>(ref);
  tvBind(tmpTV, *loc.ptr);
  vmStack().discard();
}
6131 OPTBLD_INLINE void iopCatch() {
6132 auto vm = &*g_context;
6133 assertx(vm->m_faults.size() > 0);
6134 Fault fault = vm->m_faults.back();
6135 vm->m_faults.pop_back();
6136 assertx(fault.m_raiseFrame == vmfp());
6137 assertx(fault.m_userException);
6138 vmStack().pushObjectNoRc(fault.m_userException);
/*
 * ChainFaults: link the previous Throwable (stack top) into the current one
 * (one below) as its "previous" exception, discarding the top cell.
 */
OPTBLD_INLINE void iopChainFaults() {
  auto const current = *vmStack().indC(1);
  auto const prev = *vmStack().indC(0);
  if (!isObjectType(current.m_type) ||
      !current.m_data.pobj->instanceof(SystemLib::s_ThrowableClass) ||
      !isObjectType(prev.m_type) ||
      !prev.m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
    raise_error(
      "Inputs to ChainFault must be objects that implement Throwable"
    );
  }

  // chainFaultObjects takes ownership of a reference to prev.
  vmStack().discard();
  chainFaultObjects(current.m_data.pobj, prev.m_data.pobj);
}
6158 OPTBLD_INLINE void iopLateBoundCls(clsref_slot slot) {
6159 Class* cls = frameStaticClass(vmfp());
6160 if (!cls) {
6161 raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
6163 slot.put(nullptr, cls);
6166 OPTBLD_INLINE void iopVerifyParamType(local_var param) {
6167 const Func *func = vmfp()->m_func;
6168 assertx(param.index < func->numParams());
6169 assertx(func->numParams() == int(func->params().size()));
6170 const TypeConstraint& tc = func->params()[param.index].typeConstraint;
6171 if (tc.isCheckable()) tc.verifyParam(param.ptr, func, param.index);
/*
 * VerifyParamTypeTS: like VerifyParamType, but additionally checks the
 * parameter against the reified type-structure dict on top of the stack
 * (which is consumed).
 */
OPTBLD_INLINE void iopVerifyParamTypeTS(local_var param) {
  iopVerifyParamType(param);
  auto const cell = vmStack().topC();
  assertx(tvIsDictOrDArray(cell));
  auto isTypeVar = tcCouldBeReified(vmfp()->m_func, param.index);
  bool warn = false;
  if ((isTypeVar || tvIsObject(param.ptr)) &&
      !verifyReifiedLocalType(cell->m_data.parr, param.ptr, isTypeVar, warn)) {
    raise_reified_typehint_error(
      folly::sformat(
        "Argument {} passed to {}() must be an instance of {}, given {}",
        param.index + 1,
        vmfp()->m_func->fullName()->data(),
        TypeStructure::toStringForDisplay(ArrNR(cell->m_data.parr)).c_str(),
        describe_actual_type(param.ptr, true)
      ), warn
    );
  }
  vmStack().popC();
}
6195 OPTBLD_INLINE void iopVerifyOutType(uint32_t paramId) {
6196 auto const func = vmfp()->m_func;
6197 assertx(paramId < func->numParams());
6198 assertx(func->numParams() == int(func->params().size()));
6199 auto const& tc = func->params()[paramId].typeConstraint;
6200 if (tc.isCheckable()) tc.verifyOutParam(vmStack().topTV(), func, paramId);
6203 namespace {
6205 OPTBLD_INLINE void verifyRetTypeImpl(size_t ind) {
6206 const auto func = vmfp()->m_func;
6207 const auto tc = func->returnTypeConstraint();
6208 if (tc.isCheckable()) tc.verifyReturn(vmStack().indC(ind), func);
6211 } // namespace
// VerifyRetTypeC: enforce the return type hint on the returned cell.
OPTBLD_INLINE void iopVerifyRetTypeC() {
  if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) return;
  verifyRetTypeImpl(0); // Cell is on the top of the stack
}
/*
 * VerifyRetTypeTS: verify the return value (one below the stack top)
 * against both the declared constraint and the reified type-structure dict
 * on top of the stack, which is consumed either way.
 */
OPTBLD_INLINE void iopVerifyRetTypeTS() {
  if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) {
    vmStack().popC();
    return;
  }
  verifyRetTypeImpl(1); // Cell is the second element on the stack
  auto const ts = vmStack().topC();
  assertx(tvIsDictOrDArray(ts));
  auto const cell = vmStack().indC(1);
  bool isTypeVar = tcCouldBeReified(vmfp()->m_func, TypeConstraint::ReturnId);
  bool warn = false;
  if ((isTypeVar || tvIsObject(cell)) &&
      !verifyReifiedLocalType(ts->m_data.parr, cell, isTypeVar, warn)) {
    raise_reified_typehint_error(
      folly::sformat(
        "Value returned from function {}() must be of type {}, {} given",
        vmfp()->m_func->fullName()->data(),
        TypeStructure::toStringForDisplay(ArrNR(ts->m_data.parr)).c_str(),
        describe_actual_type(cell, true)
      ), warn
    );
  }
  vmStack().popC();
}
6243 OPTBLD_INLINE void iopVerifyRetNonNullC() {
6244 if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) return;
6245 const auto func = vmfp()->m_func;
6246 const auto tc = func->returnTypeConstraint();
6247 tc.verifyReturnNonNull(vmStack().topC(), func);
/*
 * NativeImpl: invoke the native implementation backing the current
 * function, then perform the return sequence (free frame, pop activation,
 * resume the caller).
 */
OPTBLD_INLINE TCA iopNativeImpl(PC& pc) {
  auto const jitReturn = jitReturnPre(vmfp());
  auto const func = vmfp()->func()->arFuncPtr();
  assertx(func);
  // Actually call the native implementation. This will handle freeing the
  // locals in the normal case. In the case of an exception, the VM unwinder
  // will take care of it.
  func(vmfp());

  // Grab caller info from ActRec.
  ActRec* sfp = vmfp()->sfp();
  Offset callOff = vmfp()->m_callOff;

  // Adjust the stack; the native implementation put the return value in the
  // right place for us already
  vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
  vmStack().ret();

  // Return control to the caller.
  returnToCaller(pc, sfp, callOff);

  return jitReturnPost(jitReturn);
}
6274 OPTBLD_INLINE void iopSelf(clsref_slot slot) {
6275 Class* clss = arGetContextClass(vmfp());
6276 if (!clss) {
6277 raise_error(HPHP::Strings::CANT_ACCESS_SELF);
6279 slot.put(nullptr, clss);
6282 OPTBLD_INLINE void iopParent(clsref_slot slot) {
6283 Class* clss = arGetContextClass(vmfp());
6284 if (!clss) {
6285 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
6287 Class* parent = clss->parent();
6288 if (!parent) {
6289 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
6291 slot.put(nullptr, parent);
/*
 * CreateCl: instantiate the closure class at `clsIx`, rescoped to the
 * current function's class, capturing `numArgs` use-vars off the stack.
 */
OPTBLD_INLINE void iopCreateCl(uint32_t numArgs, uint32_t clsIx) {
  auto const func = vmfp()->m_func;
  auto const preCls = func->unit()->lookupPreClassId(clsIx);
  auto const c = Unit::defClosure(preCls);

  auto const cls = c->rescope(const_cast<Class*>(func->cls()));
  auto obj = newInstance(cls);
  // init() consumes the captured cells sitting on the stack.
  c_Closure::fromObject(obj)->init(numArgs, vmfp(), vmStack().top());
  vmStack().ndiscard(numArgs);
  vmStack().pushObjectNoRc(obj);
}
6306 static inline BaseGenerator* this_base_generator(const ActRec* fp) {
6307 auto const obj = fp->getThis();
6308 assertx(obj->getVMClass() == AsyncGenerator::getClass() ||
6309 obj->getVMClass() == Generator::getClass());
6310 return obj->getVMClass() == Generator::getClass()
6311 ? static_cast<BaseGenerator*>(Generator::fromObject(obj))
6312 : static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj));
6315 static inline Generator* this_generator(const ActRec* fp) {
6316 auto const obj = fp->getThis();
6317 return Generator::fromObject(obj);
// Interned name of the implicit $this variable.
const StaticString s_this("this");
/*
 * CreateCont: suspend the current (not-yet-resumed) generator function.
 * Creates the Generator/AsyncGenerator object capturing locals and
 * iterators, frees the frame, and returns the object to the caller.
 */
OPTBLD_INLINE TCA iopCreateCont(PC& pc) {
  auto const jitReturn = jitReturnPre(vmfp());

  auto const fp = vmfp();
  auto const func = fp->func();
  auto const numSlots = func->numSlotsInFrame();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(!fp->resumed());
  assertx(func->isGenerator());

  // Create the {Async,}Generator object. Create takes care of copying local
  // variables and iterators.
  auto const obj = func->isAsync()
    ? AsyncGenerator::Create(fp, numSlots, nullptr, resumeOffset)
    : Generator::Create(fp, numSlots, nullptr, resumeOffset);

  auto const genData = func->isAsync() ?
    static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj)) :
    static_cast<BaseGenerator*>(Generator::fromObject(obj));

  EventHook::FunctionSuspendCreateCont(fp, genData->actRec());

  // Grab caller info from ActRec.
  ActRec* sfp = fp->sfp();
  Offset callOff = fp->m_callOff;

  // Free ActRec and store the return value.
  vmStack().ndiscard(numSlots);
  vmStack().ret();
  tvCopy(make_tv<KindOfObject>(obj), *vmStack().topTV());
  assertx(vmStack().topTV() == fp->retSlot());

  // Return control to the caller.
  returnToCaller(pc, sfp, callOff);

  return jitReturnPost(jitReturn);
}
/*
 * Switch execution into the given (running) generator: link its ActRec as
 * the new top frame, returning to `origpc` in the current frame via the
 * appropriate ret helper, and position vmpc at the resume offset.
 */
OPTBLD_INLINE void movePCIntoGenerator(PC origpc, BaseGenerator* gen) {
  assertx(gen->isRunning());
  ActRec* genAR = gen->actRec();
  genAR->setReturn(vmfp(), origpc, genAR->func()->isAsync() ?
    jit::tc::ustubs().asyncGenRetHelper :
    jit::tc::ustubs().genRetHelper);

  vmfp() = genAR;

  assertx(genAR->func()->contains(gen->resumable()->resumeOffset()));
  vmpc() = genAR->func()->unit()->at(gen->resumable()->resumeOffset());
}
6373 OPTBLD_INLINE bool tvIsGenerator(TypedValue tv) {
6374 return tv.m_type == KindOfObject &&
6375 tv.m_data.pobj->instanceof(Generator::getClass());
/*
 * Shared body of ContEnter/ContRaise: transfer control into the generator
 * held in $this, then fire the resume event hook.
 */
template<bool recursive>
OPTBLD_INLINE void contEnterImpl(PC origpc) {

  // The stack must have one cell! Or else resumableStackBase() won't work!
  assertx(vmStack().top() + 1 ==
          (TypedValue*)vmfp() - vmfp()->m_func->numSlotsInFrame());

  // Do linkage of the generator's AR.
  assertx(vmfp()->hasThis());
  // `recursive` determines whether we enter just the top generator or whether
  // we drop down to the lowest running delegate generator. This is useful for
  // ContRaise, which should throw from the context of the lowest generator.
  if(!recursive || vmfp()->getThis()->getVMClass() != Generator::getClass()) {
    movePCIntoGenerator(origpc, this_base_generator(vmfp()));
  } else {
    // TODO(https://github.com/facebook/hhvm/issues/6040)
    // Implement throwing from delegate generators.
    assertx(vmfp()->getThis()->getVMClass() == Generator::getClass());
    auto gen = this_generator(vmfp());
    if (gen->m_delegate.m_type != KindOfNull) {
      SystemLib::throwExceptionObject("Throwing from a delegate generator is "
          "not currently supported in HHVM");
    }
    movePCIntoGenerator(origpc, gen);
  }

  EventHook::FunctionResumeYield(vmfp());
}
// ContEnter: resume the generator stored in $this (non-recursive entry).
OPTBLD_INLINE void iopContEnter(PC origpc, PC& pc) {
  contEnterImpl<false>(origpc);
  pc = vmpc();
}
// ContRaise: enter the innermost running generator, then throw the exception
// on top of the stack from inside it.
OPTBLD_INLINE void iopContRaise(PC origpc, PC& pc) {
  contEnterImpl<true>(origpc);
  pc = vmpc();
  iopThrow(pc);
}
// Suspend the currently executing generator frame at `pc`, recording the
// yielded (key, value) pair, then return control to the caller (or to the
// scheduler for a resumed async generator). `key` may be null for keyless
// yields.
OPTBLD_INLINE TCA yield(PC& pc, const Cell* key, const Cell value) {
  auto const jitReturn = jitReturnPre(vmfp());

  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(fp->resumed());
  assertx(func->isGenerator());

  // Suspend hook may throw; run it before touching the stack.
  EventHook::FunctionSuspendYield(fp);

  auto const sfp = fp->sfp();
  auto const callOff = fp->m_callOff;

  if (!func->isAsync()) {
    // Non-async generator.
    assertx(fp->sfp());
    frame_generator(fp)->yield(resumeOffset, key, value);

    // Push return value of next()/send()/raise().
    vmStack().pushNull();
  } else {
    // Async generator.
    auto const gen = frame_async_generator(fp);
    auto const eagerResult = gen->yield(resumeOffset, key, value);
    if (eagerResult) {
      // Eager execution => return StaticWaitHandle.
      assertx(sfp);
      vmStack().pushObjectNoRc(eagerResult);
    } else {
      // Resumed execution => return control to the scheduler.
      assertx(!sfp);
    }
  }

  returnToCaller(pc, sfp, callOff);

  return jitReturnPost(jitReturn);
}
// Yield: pop the value to yield and suspend the generator (no key).
OPTBLD_INLINE TCA iopYield(PC& pc) {
  auto const value = *vmStack().topC();
  vmStack().discard();
  return yield(pc, nullptr, value);
}
// YieldK: pop key and value (key below value) and suspend the generator.
OPTBLD_INLINE TCA iopYieldK(PC& pc) {
  auto const key = *vmStack().indC(1);
  auto const value = *vmStack().topC();
  vmStack().ndiscard(2);
  return yield(pc, &key, value);
}
6471 OPTBLD_INLINE bool typeIsValidGeneratorDelegate(DataType type) {
6472 return type == KindOfArray ||
6473 type == KindOfPersistentArray ||
6474 type == KindOfObject;
// ContAssignDelegate: begin a `yield from` by installing the value on top of
// the stack as this generator's delegate, or initializing `iter` for plain
// iterables. Throws if the value cannot be delegated to.
OPTBLD_INLINE void iopContAssignDelegate(Iter* iter) {
  auto param = *vmStack().topC();
  vmStack().discard();
  auto gen = frame_generator(vmfp());
  if (UNLIKELY(!typeIsValidGeneratorDelegate(param.m_type))) {
    tvDecRefGen(param);
    SystemLib::throwErrorObject(
      "Can use \"yield from\" only with arrays and Traversables"
    );
  }

  // We don't use the iterator if we have a delegate generator (as iterators
  // mess with the internal state of the generator), so short circuit and
  // don't init our iterator in that case. Otherwise, if we init our iterator
  // and it returns false then we know that we have an empty iterator (like
  // `[]`) in which case just set our delegate to Null so that
  // ContEnterDelegate and YieldFromDelegate know something is up.
  if (tvIsGenerator(param) || iter->init<false>(&param)) {
    cellSet(param, gen->m_delegate);
  } else {
    cellSetNull(gen->m_delegate);
  }

  // When using a subgenerator we don't actually read the values of the m_key
  // and m_value of our frame generator (the delegating generator). The
  // generator itself is still holding a reference to them though, so null
  // out the key/value to free the memory.
  cellSetNull(gen->m_key);
  cellSetNull(gen->m_value);
}
// ContEnterDelegate: if this generator's `yield from` delegate is itself a
// generator, transfer control into it; otherwise (iterator delegate or a
// finished/shared delegate) fall through to YieldFromDelegate.
OPTBLD_INLINE void iopContEnterDelegate(PC origpc, PC& pc) {
  // Make sure we have a delegate
  auto gen = frame_generator(vmfp());

  // Ignore the VM Stack, we want to pass that down from ContEnter

  // ContEnterDelegate doesn't do anything for iterators.
  if (!tvIsGenerator(gen->m_delegate)) {
    return;
  }

  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If our generator finished earlier (or if there was nothing to do) just
    // continue on and let YieldFromDelegate handle cleaning up.
    return;
  }

  // A pretty odd if statement, but consider the following situation.
  // Generators A and B both do `yield from` on a shared delegate generator,
  // C. When A is first used we autoprime it, and therefore also autoprime C as
  // well. Then we also autoprime B when it gets used, which advances C past
  // some perfectly valid data.
  // Basically this check is to make sure that we autoprime delegate generators
  // when needed, and not if they're shared.
  if (gen->getState() == BaseGenerator::State::Priming &&
      delegate->getState() != BaseGenerator::State::Created) {
    return;
  }

  // We're about to resume executing our generator, so make sure we're in the
  // right state.
  delegate->preNext(false);

  movePCIntoGenerator(origpc, delegate);
  EventHook::FunctionResumeYield(vmfp());
  pc = vmpc();
}
// Suspend `gen` for a `yield from` whose delegate is itself a generator.
// If the delegate is already done, its return value is pushed instead and
// execution continues without suspending.
OPTBLD_INLINE
TCA yieldFromGenerator(PC& pc, Generator* gen, Offset resumeOffset) {
  auto fp = vmfp();

  assertx(tvIsGenerator(gen->m_delegate));
  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If the generator is done, just copy the return value onto the stack.
    cellDup(delegate->m_value, *vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  EventHook::FunctionSuspendYield(fp);
  auto const sfp = fp->sfp();
  auto const callOff = fp->m_callOff;

  // We don't actually want to "yield" anything here. The implementation of
  // key/current are smart enough to dive into our delegate generator, so
  // really what we want to do is clean up all of the generator metadata
  // (state, resume address, etc) and continue on.
  assertx(gen->isRunning());
  gen->resumable()->setResumeAddr(nullptr, resumeOffset);
  gen->setState(BaseGenerator::State::Started);

  returnToCaller(pc, sfp, callOff);

  return jitReturnPost(jitReturn);
}
// Suspend `gen` for a `yield from` over a plain iterator: yield the
// iterator's current key/value pair and advance it. Pushes null (without
// suspending) when the delegate is absent or iteration is exhausted.
OPTBLD_INLINE
TCA yieldFromIterator(PC& pc, Generator* gen, Iter* it, Offset resumeOffset) {
  auto fp = vmfp();

  // For the most part this should never happen, the emitter assigns our
  // delegate to a non-null value in ContAssignDelegate. The one exception to
  // this is if we are given an empty iterator, in which case
  // ContAssignDelegate will remove our delegate and just send us to
  // YieldFromDelegate to return our null.
  if (UNLIKELY(gen->m_delegate.m_type == KindOfNull)) {
    tvWriteNull(*vmStack().topTV());
    return nullptr;
  }

  // Otherwise, if iteration is finished we just return null.
  auto arr = it->arr();
  if (arr.end()) {
    // Push our null return value onto the stack
    tvWriteNull(*vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  EventHook::FunctionSuspendYield(fp);
  auto const sfp = fp->sfp();
  auto const callOff = fp->m_callOff;

  auto key = *(arr.first().asTypedValue());
  auto value = *(arr.second().asTypedValue());
  gen->yield(resumeOffset, &key, value);

  returnToCaller(pc, sfp, callOff);

  // Advance past the pair we just yielded.
  it->next();

  return jitReturnPost(jitReturn);
}
6618 OPTBLD_INLINE TCA iopYieldFromDelegate(PC& pc, Iter* it, PC resumePc) {
6619 auto gen = frame_generator(vmfp());
6620 auto func = vmfp()->func();
6621 auto resumeOffset = func->unit()->offsetOf(resumePc);
6622 if (tvIsGenerator(gen->m_delegate)) {
6623 return yieldFromGenerator(pc, gen, resumeOffset);
6625 return yieldFromIterator(pc, gen, it, resumeOffset);
// ContUnsetDelegate: tear down the `yield from` state on this generator.
OPTBLD_INLINE void iopContUnsetDelegate(CudOp subop, Iter* iter) {
  auto gen = frame_generator(vmfp());
  // The `shouldFreeIter` immediate determines whether we need to call free
  // on our iterator or not. Normally if we finish executing our yield from
  // successfully then the implementation of `next` will automatically do it
  // for us when there aren't any elements left, but if an exception is thrown
  // then we need to do it manually. We don't use the iterator when the
  // delegate is a generator though, so even if the param tells us to free it
  // we should just ignore it.
  if (UNLIKELY(subop == CudOp::FreeIter && !tvIsGenerator(gen->m_delegate))) {
    iter->free();
  }
  cellSetNull(gen->m_delegate);
}
6643 OPTBLD_INLINE void iopContCheck(ContCheckOp subop) {
6644 this_base_generator(vmfp())->preNext(subop == ContCheckOp::CheckStarted);
6647 OPTBLD_INLINE void iopContValid() {
6648 vmStack().pushBool(
6649 this_generator(vmfp())->getState() != BaseGenerator::State::Done);
6652 OPTBLD_INLINE Generator *currentlyDelegatedGenerator(Generator *gen) {
6653 while(tvIsGenerator(gen->m_delegate)) {
6654 gen = Generator::fromObject(gen->m_delegate.m_data.pobj);
6656 return gen;
6659 OPTBLD_INLINE void iopContKey() {
6660 Generator* cont = this_generator(vmfp());
6661 cont->startedCheck();
6663 // If we are currently delegating to a generator, return its key instead
6664 cont = currentlyDelegatedGenerator(cont);
6666 cellDup(cont->m_key, *vmStack().allocC());
6669 OPTBLD_INLINE void iopContCurrent() {
6670 Generator* cont = this_generator(vmfp());
6671 cont->startedCheck();
6673 // If we are currently delegating to a generator, return its value instead
6674 cont = currentlyDelegatedGenerator(cont);
6676 if(cont->getState() == BaseGenerator::State::Done) {
6677 vmStack().pushNull();
6678 } else {
6679 cellDup(cont->m_value, *vmStack().allocC());
6683 OPTBLD_INLINE void iopContGetReturn() {
6684 Generator* cont = this_generator(vmfp());
6685 cont->startedCheck();
6687 if(!cont->successfullyFinishedExecuting()) {
6688 SystemLib::throwExceptionObject("Cannot get return value of a generator "
6689 "that hasn't returned");
6692 cellDup(cont->m_value, *vmStack().allocC());
// Await during eager execution: package the currently executing async
// function/generator frame into a heap wait handle blocked on the popped
// child, and return control to the caller.
OPTBLD_INLINE void asyncSuspendE(PC& pc) {
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(func->isAsync());
  assertx(resumeModeFromActRec(fp) != ResumeMode::Async);

  // Pop the dependency we are blocked on.
  auto child = wait_handle<c_WaitableWaitHandle>(*vmStack().topC());
  assertx(!child->isFinished());
  vmStack().discard();

  if (!func->isGenerator()) {  // Async function.
    // Create the AsyncFunctionWaitHandle object. Create takes care of
    // copying local variables and iterators.
    auto waitHandle = c_AsyncFunctionWaitHandle::Create<true>(
      fp, func->numSlotsInFrame(), nullptr, resumeOffset, child);

    // Call the suspend hook. It will decref the newly allocated waitHandle
    // if it throws.
    EventHook::FunctionSuspendAwaitEF(fp, waitHandle->actRec());

    // Grab caller info from ActRec.
    ActRec* sfp = fp->sfp();
    Offset callOff = fp->m_callOff;

    // Free ActRec and store the return value. In case async eager return was
    // requested by the caller, let it know that we did not finish eagerly.
    vmStack().ndiscard(func->numSlotsInFrame());
    vmStack().ret();
    tvCopy(make_tv<KindOfObject>(waitHandle), *vmStack().topTV());
    vmStack().topTV()->m_aux.u_asyncNonEagerReturnFlag = -1;
    assertx(vmStack().topTV() == fp->retSlot());

    // Return control to the caller.
    returnToCaller(pc, sfp, callOff);
  } else {  // Async generator.
    // Create new AsyncGeneratorWaitHandle.
    auto waitHandle = c_AsyncGeneratorWaitHandle::Create(
      fp, nullptr, resumeOffset, child);

    // Call the suspend hook. It will decref the newly allocated waitHandle
    // if it throws.
    EventHook::FunctionSuspendAwaitEG(fp);

    // Store the return value.
    vmStack().pushObjectNoRc(waitHandle);

    // Return control to the caller (AG::next()).
    assertx(fp->sfp());
    returnToCaller(pc, fp->sfp(), fp->m_callOff);
  }
}
// Await during resumed execution: the frame already lives on the heap, so
// just block its wait handle on the popped child and hand control back to
// the scheduler (null pc/fp).
OPTBLD_INLINE void asyncSuspendR(PC& pc) {
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(!fp->sfp());
  assertx(func->isAsync());
  assertx(resumeModeFromActRec(fp) == ResumeMode::Async);

  // Pop the dependency we are blocked on.
  auto child = req::ptr<c_WaitableWaitHandle>::attach(
    wait_handle<c_WaitableWaitHandle>(*vmStack().topC()));
  assertx(!child->isFinished());
  vmStack().discard();

  // Before adjusting the stack or doing anything, check the suspend hook.
  // This can throw.
  EventHook::FunctionSuspendAwaitR(fp, child.get());

  // Await child and suspend the async function/generator. May throw.
  if (!func->isGenerator()) {  // Async function.
    frame_afwh(fp)->await(resumeOffset, std::move(child));
  } else {  // Async generator.
    auto const gen = frame_async_generator(fp);
    gen->resumable()->setResumeAddr(nullptr, resumeOffset);
    gen->getWaitHandle()->await(std::move(child));
  }

  // Return control to the scheduler.
  pc = nullptr;
  vmfp() = nullptr;
}
6781 namespace {
// Suspend the current async frame on an Await, choosing the eager or
// resumed flavor based on the frame's resume mode. Returns the TC address
// to resume at (via the jitReturn pre/post pair), if any.
TCA suspendStack(PC &pc) {
  auto const jitReturn = jitReturnPre(vmfp());
  if (resumeModeFromActRec(vmfp()) == ResumeMode::Async) {
    // suspend resumed execution
    asyncSuspendR(pc);
  } else {
    // suspend eager execution
    asyncSuspendE(pc);
  }
  return jitReturnPost(jitReturn);
}
6797 OPTBLD_INLINE TCA iopAwait(PC& pc) {
6798 auto const awaitable = vmStack().topC();
6799 auto wh = c_Awaitable::fromCell(*awaitable);
6800 if (UNLIKELY(wh == nullptr)) {
6801 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
6803 if (LIKELY(wh->isFailed())) {
6804 throw req::root<Object>{wh->getException()};
6806 if (wh->isSucceeded()) {
6807 cellSet(wh->getResult(), *vmStack().topC());
6808 return nullptr;
6810 return suspendStack(pc);
// AwaitAll: await every non-null Awaitable stored in the given range of
// locals. If all are already finished, push null and continue; otherwise
// build an AwaitAllWaitHandle over the unfinished ones and suspend.
OPTBLD_INLINE TCA iopAwaitAll(PC& pc, LocalRange locals) {
  // Count the locals that are genuinely unfinished Awaitables; nulls are
  // skipped, anything else must be an Awaitable.
  uint32_t cnt = 0;
  for (auto i = locals.first; i < locals.first + locals.count; ++i) {
    auto const local = *frame_local(vmfp(), i);
    if (cellIsNull(local)) continue;
    auto const awaitable = c_Awaitable::fromCell(local);
    if (UNLIKELY(awaitable == nullptr)) {
      SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
    }
    if (!awaitable->isFinished()) {
      ++cnt;
    }
  }

  if (!cnt) {
    // Nothing to wait for.
    vmStack().pushNull();
    return nullptr;
  }

  auto obj = Object::attach(c_AwaitAllWaitHandle::fromFrameNoCheck(
    locals.count, cnt, frame_local(vmfp(), locals.first)
  ));
  assertx(obj->isWaitHandle());
  assertx(!static_cast<c_Awaitable*>(obj.get())->isFinished());

  vmStack().pushObjectNoRc(obj.detach());
  return suspendStack(pc);
}
// WHResult: replace the wait handle on top of the stack with its result;
// rethrows for failed handles and raises for still-pending ones.
OPTBLD_INLINE void iopWHResult() {
  // we should never emit this bytecode for non-waithandle
  auto const wh = c_Awaitable::fromCell(*vmStack().topC());
  if (UNLIKELY(!wh)) {
    raise_error("WHResult input was not a subclass of Awaitable");
  }

  // the failure condition is likely since we punt to this opcode
  // in the JIT when the state is failed.
  if (wh->isFailed()) {
    throw_object(Object{wh->getException()});
  }
  if (wh->isSucceeded()) {
    cellSet(wh->getResult(), *vmStack().topC());
    return;
  }
  SystemLib::throwInvalidOperationExceptionObject(
    "Request for result on pending wait handle, "
    "must await or join() before calling result()");
  not_reached();
}
// CheckProp: push whether the declared property named `propName` of the
// current class has already been initialized (is not Uninit) in the 86pinit
// property vector.
OPTBLD_INLINE void iopCheckProp(const StringData* propName) {
  auto* cls = vmfp()->getClass();
  auto* propVec = cls->getPropData();
  always_assert(propVec);

  auto* ctx = arGetContextClass(vmfp());
  auto idx = ctx->lookupDeclProp(propName);

  auto& tv = (*propVec)[idx];
  vmStack().pushBool(tv.m_type != KindOfUninit);
}
// InitProp: initialize a static or non-static property of the current class
// with the value on top of the stack (run from 86pinit/86sinit), verifying
// the property's type hint when hint checking is enabled.
OPTBLD_INLINE void iopInitProp(const StringData* propName, InitPropOp propOp) {
  auto* cls = vmfp()->getClass();
  TypedValue* tv;

  auto* ctx = arGetContextClass(vmfp());
  auto* fr = vmStack().topC();

  switch (propOp) {
    case InitPropOp::Static: {
      auto const slot = ctx->lookupSProp(propName);
      assertx(slot != kInvalidSlot);
      tv = cls->getSPropData(slot);
      if (RuntimeOption::EvalCheckPropTypeHints > 0) {
        auto const& sprop = cls->staticProperties()[slot];
        auto const& tc = sprop.typeConstraint;
        if (tc.isCheckable()) {
          tc.verifyStaticProperty(fr, cls, sprop.cls, sprop.name);
        }
      }
      break;
    }

    case InitPropOp::NonStatic: {
      auto* propVec = cls->getPropData();
      always_assert(propVec);
      auto const idx = ctx->lookupDeclProp(propName);
      assertx(idx != kInvalidSlot);
      tv = &(*propVec)[idx];
      if (RuntimeOption::EvalCheckPropTypeHints > 0) {
        auto const& prop = cls->declProperties()[idx];
        auto const& tc = prop.typeConstraint;
        if (tc.isCheckable()) tc.verifyProperty(fr, cls, prop.cls, prop.name);
      }
    } break;
  }

  // Store the (type-checked) initializer value into the property slot.
  cellDup(*fr, *tvToCell(tv));
  vmStack().popC();
}
6916 OPTBLD_INLINE void iopOODeclExists(OODeclExistsOp subop) {
6917 TypedValue* aloadTV = vmStack().topTV();
6918 if (aloadTV->m_type != KindOfBoolean) {
6919 raise_error("OODeclExists: Expected Bool on top of stack, got %s",
6920 tname(aloadTV->m_type).c_str());
6923 bool autoload = aloadTV->m_data.num;
6924 vmStack().popX();
6926 TypedValue* name = vmStack().topTV();
6927 if (!isStringType(name->m_type)) {
6928 raise_error("OODeclExists: Expected String on stack, got %s",
6929 tname(aloadTV->m_type).c_str());
6932 ClassKind kind;
6933 switch (subop) {
6934 case OODeclExistsOp::Class : kind = ClassKind::Class; break;
6935 case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
6936 case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
6938 tvAsVariant(name) = Unit::classExists(name->m_data.pstr, autoload, kind);
6941 OPTBLD_INLINE void iopSilence(local_var loc, SilenceOp subop) {
6942 switch (subop) {
6943 case SilenceOp::Start:
6944 loc.ptr->m_type = KindOfInt64;
6945 loc.ptr->m_data.num = zero_error_level();
6946 break;
6947 case SilenceOp::End:
6948 assertx(loc.ptr->m_type == KindOfInt64);
6949 restore_error_level(loc.ptr->m_data.num);
6950 break;
6954 std::string prettyStack(const std::string& prefix) {
6955 if (!vmfp()) return "__Halted";
6956 int offset = (vmfp()->m_func->unit() != nullptr)
6957 ? pcOff() : 0;
6958 auto begPrefix = prefix + "__";
6959 auto midPrefix = prefix + "|| ";
6960 auto endPrefix = prefix + "\\/";
6961 auto stack = vmStack().toString(vmfp(), offset, midPrefix);
6962 return begPrefix + "\n" + stack + endPrefix;
6965 // callable from gdb
6966 void DumpStack() {
6967 fprintf(stderr, "%s\n", prettyStack("").c_str());
6970 // callable from gdb
// Dump the bytecode of the unit `skip` frames up from the current one to
// stdout; intended for use from gdb.
void DumpCurUnit(int skip) {
  ActRec* fp = vmfp();
  Offset pc = fp->m_func->unit() ? pcOff() : 0;
  // Walk up `skip` VM frames (may cross nesting levels).
  while (skip--) {
    fp = g_context->getPrevVMState(fp, &pc);
  }
  if (fp == nullptr) {
    std::cout << "Don't have a valid fp\n";
    return;
  }

  printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
  Unit* u = fp->m_func->unit();
  if (u == nullptr) {
    std::cout << "Current unit is NULL\n";
    return;
  }
  printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
  std::cout << u->toString();
}
6992 // callable from gdb
// Print the nearest valid translation-cache return address on the frame
// chain plus the current file:line; intended for use from gdb.
void PrintTCCallerInfo() {
  VMRegAnchor _;

  auto const u = vmfp()->m_func->unit();
  auto const rip = []() -> jit::TCA {
    DECLARE_FRAME_POINTER(reg_fp);
    // NB: We can't directly mutate the register-mapped `reg_fp'.
    for (ActRec* fp = reg_fp; fp; fp = fp->m_sfp) {
      auto const rip = jit::TCA(fp->m_savedRip);
      if (jit::tc::isValidCodeAddress(rip)) return rip;
    }
    return nullptr;
  }();

  fprintf(stderr, "Called from TC address %p\n", rip);
  std::cerr << u->filepath()->data() << ':'
            << u->getLineNumber(u->offsetOf(vmpc())) << '\n';
}
7012 // thread-local cached coverage info
// Unit whose coverage was most recently recorded on this thread.
static __thread Unit* s_prev_unit;
// Line most recently recorded for s_prev_unit (dedupes Record() calls).
static __thread int s_prev_line;
// Record line coverage for the instruction about to execute, skipping the
// systemlib hhas unit and deduping consecutive hits of the same unit/line
// via the thread-local cache above.
void recordCodeCoverage(PC /*pc*/) {
  Unit* unit = vmfp()->m_func->unit();
  assertx(unit != nullptr);
  if (unit == SystemLib::s_hhas_unit) {
    return;
  }

  int line = unit->getLineNumber(pcOff());
  assertx(line != -1);

  if (unit != s_prev_unit || line != s_prev_line) {
    s_prev_unit = unit;
    s_prev_line = line;
    const StringData* filepath = unit->filepath();
    assertx(filepath->isStatic());
    RI().m_coverage->Record(filepath->data(), line, line);
  }
}
7034 void resetCoverageCounters() {
7035 s_prev_line = -1;
7036 s_prev_unit = nullptr;
// Emit a trace-level-3 separator line tagged with the opcode name, used to
// bracket the pre/post stack traces around each interpreted instruction.
static inline void
condStackTraceSep(Op opcode) {
  TRACE(3, "%s "
        "========================================"
        "========================================\n",
        opcodeToName(opcode));
}
// Trace (at level 3) the pretty-printed VM stack, each line tagged with pfx.
#define COND_STACKTRACE(pfx)\
  ONTRACE(3, auto stack = prettyStack(pfx);\
             Trace::trace("%s\n", stack.c_str());)
7051 namespace {
7054 * iopWrapReturn() calls a function pointer and forwards its return value if it
7055 * returns TCA, or nullptr if returns void.
// Overload for iop handlers returning void: run the handler and report "no
// TC address to resume at" (nullptr) to the dispatch loop.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(Params...), PC, Args&&... args) {
  fn(std::forward<Args>(args)...);
  return nullptr;
}
// Overload for iop handlers returning a TCA: forward their return value.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(TCA(fn)(Params...), PC, Args&&... args) {
  return fn(std::forward<Args>(args)...);
}
7069 * iopSwitch and iopSSwitch take vectors containing Offset and need origpc to
7070 * translate those to PC. Special-case that here rather than creating a new
7071 * flag in hhbc.h just for this one case.
// Overload for iop handlers whose first parameter is the original PC (e.g.
// Switch/SSwitch, which translate Offset immediates relative to it).
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(PC, Params...), PC origpc,
                                Args&&... args) {
  fn(origpc, std::forward<Args>(args)...);
  return nullptr;
}
7081 * Some bytecodes with SA immediates want the raw Id to look up a NamedEntity
7082 * quickly, and some want the const StringData*. Support both by decoding to
7083 * this struct and implicitly converting to what the callee wants.
// Decoded SA immediate: converts implicitly either to the raw litstr Id or
// to the interned StringData* (resolved against the live unit), so iop
// handlers can take whichever form they prefer.
struct litstr_id {
  /* implicit */ ALWAYS_INLINE operator const StringData*() const {
    return liveUnit()->lookupLitstrId(id);
  }
  /* implicit */ ALWAYS_INLINE operator Id() const {
    return id;
  }

  // Defaults to an invalid id until decoded from the bytecode stream.
  Id id{kInvalidId};
};
7097 * These macros are used to generate wrapper functions for the iop*() functions
7098 * defined earlier in this file. iopWrapFoo() decodes immediates from the
7099 * bytecode stream according to the signature of Foo (in hhbc.h), then calls
7100 * iopFoo() with those decoded arguments.
// FLAG_* inject extra call arguments based on an opcode's flags: control-flow
// (CF) instructions additionally receive the mutable pc.
#define FLAG_NF
#define FLAG_TF
#define FLAG_CF , pc
#define FLAG_PF
#define FLAG_CF_TF FLAG_CF

// DECODE_* read one immediate of the given kind from the bytecode stream,
// advancing pc.
#define DECODE_IVA decode_iva(pc)
#define DECODE_I64A decode<int64_t>(pc)
#define DECODE_LA decode_local(pc)
#define DECODE_IA decode_iter(pc)
#define DECODE_CAR decode_clsref_slot(pc)
#define DECODE_CAW DECODE_CAR
#define DECODE_DA decode<double>(pc)
#define DECODE_SA decode<litstr_id>(pc)
#define DECODE_AA decode_litarr(pc)
#define DECODE_RATA decode_rat(pc)
#define DECODE_BA origpc + decode_ba(pc)
#define DECODE_OA(ty) decode<ty>(pc)
#define DECODE_KA decode_member_key(pc, liveUnit())
#define DECODE_LAR decodeLocalRange(pc)
#define DECODE_FCA decodeFCallArgs(pc)
#define DECODE_BLA decode_imm_array<Offset>(pc)
#define DECODE_SLA decode_imm_array<StrVecItem>(pc)
#define DECODE_ILA decode_iter_table(pc)
#define DECODE_I32LA decode_imm_array<uint32_t>(pc)
#define DECODE_VSA decode_imm_array<Id>(pc)

// DECODE_<arity> binds each immediate to imm1..immN; PASS_<arity> forwards
// them to the iop handler call.
#define DECODE_NA
#define DECODE_ONE(a) auto const imm1 = DECODE_##a;
#define DECODE_TWO(a, b) DECODE_ONE(a) auto const imm2 = DECODE_##b;
#define DECODE_THREE(a, b, c) DECODE_TWO(a, b) auto const imm3 = DECODE_##c;
#define DECODE_FOUR(a, b, c, d) \
  DECODE_THREE(a, b, c) auto const imm4 = DECODE_##d;
#define DECODE_FIVE(a, b, c, d, e) \
  DECODE_FOUR(a, b, c, d) auto const imm5 = DECODE_##e;

#define PASS_NA
#define PASS_ONE(...) , imm1
#define PASS_TWO(...) , imm1, imm2
#define PASS_THREE(...) , imm1, imm2, imm3
#define PASS_FOUR(...) , imm1, imm2, imm3, imm4
#define PASS_FIVE(...) , imm1, imm2, imm3, imm4, imm5

// Generate one iopWrap<Name> per opcode: decode the immediates declared for
// the opcode in hhbc.h, then invoke iop<Name> through iopWrapReturn.
#define O(name, imm, in, out, flags)                                 \
  OPTBLD_INLINE TCA iopWrap##name(PC& pc) {                          \
    UNUSED auto const op = Op::name;                                 \
    UNUSED auto const origpc = pc - encoded_op_size(op);             \
    DECODE_##imm                                                     \
    return iopWrapReturn(iop##name, origpc FLAG_##flags PASS_##imm); \
  }
OPCODES
7154 #undef FLAG_NF
7155 #undef FLAG_TF
7156 #undef FLAG_CF
7157 #undef FLAG_PF
7158 #undef FLAG_CF_TF
7160 #undef DECODE_IVA
7161 #undef DECODE_I64A
7162 #undef DECODE_LA
7163 #undef DECODE_IA
7164 #undef DECODE_CAR
7165 #undef DECODE_CAW
7166 #undef DECODE_DA
7167 #undef DECODE_SA
7168 #undef DECODE_AA
7169 #undef DECODE_RATA
7170 #undef DECODE_BA
7171 #undef DECODE_OA
7172 #undef DECODE_KA
7173 #undef DECODE_LAR
7174 #undef DECODE_FCA
7175 #undef DECODE_BLA
7176 #undef DECODE_SLA
7177 #undef DECODE_ILA
7178 #undef DECODE_I32LA
7179 #undef DECODE_VSA
7181 #undef DECODE_NA
7182 #undef DECODE_ONE
7183 #undef DECODE_TWO
7184 #undef DECODE_THREE
7185 #undef DECODE_FOUR
7186 #undef DECODE_FIVE
7188 #undef PASS_NA
7189 #undef PASS_ONE
7190 #undef PASS_TWO
7191 #undef PASS_THREE
7192 #undef PASS_FOUR
7193 #undef PASS_FIVE
7195 #undef O
7200 * The interpOne functions are fat wrappers around the iop* functions, mostly
7201 * adding a bunch of debug-only logging and stats tracking.
7203 #define O(opcode, imm, push, pop, flags) \
7204 TCA interpOne##opcode(ActRec* fp, TypedValue* sp, Offset pcOff) { \
7205 interp_set_regs(fp, sp, pcOff); \
7206 SKTRACE(5, liveSK(), \
7207 "%40s %p %p\n", \
7208 "interpOne" #opcode " before (fp,sp)", vmfp(), vmsp()); \
7209 if (Stats::enableInstrCount()) { \
7210 Stats::inc(Stats::Instr_Transl##opcode, -1); \
7211 Stats::inc(Stats::Instr_InterpOne##opcode); \
7213 if (Trace::moduleEnabled(Trace::interpOne, 1)) { \
7214 static const StringData* cat = makeStaticString("interpOne"); \
7215 static const StringData* name = makeStaticString(#opcode); \
7216 Stats::incStatGrouped(cat, name, 1); \
7218 if (Trace::moduleEnabled(Trace::ringbuffer)) { \
7219 auto sk = liveSK().toAtomicInt(); \
7220 Trace::ringbufferEntry(Trace::RBTypeInterpOne, sk, 0); \
7222 INC_TPC(interp_one) \
7223 /* Correct for over-counting in TC-stats. */ \
7224 Stats::inc(Stats::Instr_TC, -1); \
7225 condStackTraceSep(Op##opcode); \
7226 COND_STACKTRACE("op"#opcode" pre: "); \
7227 PC pc = vmpc(); \
7228 ONTRACE(1, auto offset = vmfp()->m_func->unit()->offsetOf(pc); \
7229 Trace::trace("op"#opcode" offset: %d\n", offset)); \
7230 assertx(peek_op(pc) == Op::opcode); \
7231 pc += encoded_op_size(Op::opcode); \
7232 auto const retAddr = iopWrap##opcode(pc); \
7233 vmpc() = pc; \
7234 COND_STACKTRACE("op"#opcode" post: "); \
7235 condStackTraceSep(Op##opcode); \
7237 * Only set regstate back to dirty if an exception is not
7238 * propagating. If an exception is throwing, regstate for this call
7239 * is actually still correct, and we don't have information in the
7240 * fixup map for interpOne calls anyway.
7241 */ \
7242 tl_regState = VMRegState::DIRTY; \
7243 return retAddr; \
7245 OPCODES
7246 #undef O
// Table of interpOne* entry points, one per opcode (indexed by Op); the JIT
// calls through these to interpret a single instruction.
InterpOneFunc interpOneEntryPoints[] = {
#define O(opcode, imm, push, pop, flags) &interpOne##opcode,
OPCODES
#undef O
};
7254 template <bool breakOnCtlFlow>
7255 TCA dispatchImpl() {
7256 // Unfortunately, MSVC doesn't support computed
7257 // gotos, so use a switch instead.
7258 bool collectCoverage = RID().getCoverage();
7260 #ifndef _MSC_VER
7261 static const void *optabDirect[] = {
7262 #define O(name, imm, push, pop, flags) \
7263 &&Label##name,
7264 OPCODES
7265 #undef O
7267 static const void *optabDbg[] = {
7268 #define O(name, imm, push, pop, flags) \
7269 &&LabelDbg##name,
7270 OPCODES
7271 #undef O
7273 static const void *optabCover[] = {
7274 #define O(name, imm, push, pop, flags) \
7275 &&LabelCover##name,
7276 OPCODES
7277 #undef O
7279 assertx(sizeof(optabDirect) / sizeof(const void *) == Op_count);
7280 assertx(sizeof(optabDbg) / sizeof(const void *) == Op_count);
7281 const void **optab = optabDirect;
7282 if (collectCoverage) {
7283 optab = optabCover;
7285 DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
7286 #endif
7288 bool isCtlFlow = false;
7289 TCA retAddr = nullptr;
7290 Op op;
7292 #ifdef _MSC_VER
7293 # define DISPATCH_ACTUAL() goto DispatchSwitch
7294 #else
7295 # define DISPATCH_ACTUAL() goto *optab[size_t(op)]
7296 #endif
7298 #define DISPATCH() do { \
7299 if (breakOnCtlFlow && isCtlFlow) { \
7300 ONTRACE(1, \
7301 Trace::trace("dispatch: Halt dispatch(%p)\n", \
7302 vmfp())); \
7303 return retAddr; \
7305 opPC = pc; \
7306 op = decode_op(pc); \
7307 COND_STACKTRACE("dispatch: "); \
7308 FTRACE(1, "dispatch: {}: {}\n", pcOff(), \
7309 instrToString(opPC, vmfp()->m_func->unit())); \
7310 DISPATCH_ACTUAL(); \
7311 } while (0)
7313 ONTRACE(1, Trace::trace("dispatch: Enter dispatch(%p)\n",
7314 vmfp()));
7315 PC pc = vmpc();
7316 PC opPC;
7317 DISPATCH();
7319 #define OPCODE_DBG_BODY(name, imm, push, pop, flags) \
7320 phpDebuggerOpcodeHook(opPC)
7321 #define OPCODE_COVER_BODY(name, imm, push, pop, flags) \
7322 if (collectCoverage) { \
7323 recordCodeCoverage(opPC); \
7325 #define OPCODE_MAIN_BODY(name, imm, push, pop, flags) \
7327 if (breakOnCtlFlow && Stats::enableInstrCount()) { \
7328 Stats::inc(Stats::Instr_InterpBB##name); \
7330 retAddr = iopWrap##name(pc); \
7331 vmpc() = pc; \
7332 if (breakOnCtlFlow) { \
7333 isCtlFlow = instrIsControlFlow(Op::name); \
7335 if (instrCanHalt(Op::name) && UNLIKELY(!pc)) { \
7336 vmfp() = nullptr; \
7337 /* We returned from the top VM frame in this nesting level. This means
7338 * m_savedRip in our ActRec must have been callToExit, which should've
7339 * been returned by jitReturnPost(), whether or not we were called from
7340 * the TC. We only actually return callToExit to our caller if that
7341 * caller is dispatchBB(). */ \
7342 assertx(retAddr == jit::tc::ustubs().callToExit); \
7343 return breakOnCtlFlow ? retAddr : nullptr; \
7345 assertx(isCtlFlow || !retAddr); \
7346 DISPATCH(); \
7349 #ifdef _MSC_VER
7350 DispatchSwitch:
7351 switch (uint8_t(op)) {
7352 #define O(name, imm, push, pop, flags) \
7353 case Op::name: { \
7354 DEBUGGER_ATTACHED_ONLY(OPCODE_DBG_BODY(name, imm, push, pop, flags)); \
7355 OPCODE_COVER_BODY(name, imm, push, pop, flags) \
7356 OPCODE_MAIN_BODY(name, imm, push, pop, flags) \
7358 #else
7359 #define O(name, imm, push, pop, flags) \
7360 LabelDbg##name: \
7361 OPCODE_DBG_BODY(name, imm, push, pop, flags); \
7362 LabelCover##name: \
7363 OPCODE_COVER_BODY(name, imm, push, pop, flags) \
7364 Label##name: \
7365 OPCODE_MAIN_BODY(name, imm, push, pop, flags)
7366 #endif
7368 OPCODES
7370 #ifdef _MSC_VER
7372 #endif
7373 #undef O
7374 #undef DISPATCH
7375 #undef DISPATCH_ACTUAL
7376 #undef OPCODE_DBG_BODY
7377 #undef OPCODE_COVER_BODY
7378 #undef OPCODE_MAIN_BODY
7380 assertx(retAddr == nullptr);
7381 return nullptr;
// Interpret bytecode until the VM returns from the top frame of this
// nesting level (does not stop at basic-block boundaries).
static void dispatch() {
  WorkloadStats guard(WorkloadStats::InInterp);

  DEBUG_ONLY auto const retAddr = dispatchImpl<false>();
  assertx(retAddr == nullptr);
}
7391 // We are about to go back to translated code, check whether we should
7392 // stick with the interpreter. NB: if we've just executed a return
7393 // from pseudomain, then there's no PC and no more code to interpret.
// Before resuming translated code, decide whether the debugger forces us to
// stay in the interpreter; returns the (possibly overridden) TC address.
OPTBLD_INLINE TCA switchModeForDebugger(TCA retAddr) {
  if (DEBUGGER_FORCE_INTR && (vmpc() != 0)) {
    if (retAddr) {
      // We just interpreted a bytecode that decided we need to return to an
      // address in the TC rather than interpreting up into our caller. This
      // means it might not be safe to throw an exception right now (see
      // discussion in jitReturnPost). So, resume execution in the TC at a stub
      // that will throw the execution from a safe place.
      FTRACE(1, "Want to throw VMSwitchMode but retAddr = {}, "
             "overriding with throwSwitchMode stub.\n", retAddr);
      return jit::tc::ustubs().throwSwitchMode;
    } else {
      throw VMSwitchMode();
    }
  }

  return retAddr;
}
7413 TCA dispatchBB() {
7414 auto sk = [] {
7415 return SrcKey(vmfp()->func(), vmpc(), resumeModeFromActRec(vmfp()),
7416 vmfp()->func()->cls() && vmfp()->hasThis());
7419 if (Trace::moduleEnabled(Trace::dispatchBB)) {
7420 static auto cat = makeStaticString("dispatchBB");
7421 auto name = makeStaticString(show(sk()));
7422 Stats::incStatGrouped(cat, name, 1);
7424 if (Trace::moduleEnabled(Trace::ringbuffer)) {
7425 Trace::ringbufferEntry(Trace::RBTypeDispatchBB, sk().toAtomicInt(), 0);
7427 auto retAddr = dispatchImpl<true>();
7428 return switchModeForDebugger(retAddr);