Allow tagFromProgramCounter to fail
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blob9a5591c23f3590821b590c9071d8f2b16df87712
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
18 #include "hphp/runtime/vm/bytecode.h"
20 #include <algorithm>
21 #include <string>
22 #include <vector>
23 #include <sstream>
24 #include <iostream>
25 #include <iomanip>
26 #include <cinttypes>
28 #include <boost/filesystem.hpp>
30 #include <folly/String.h>
31 #include <folly/portability/SysMman.h>
33 #include "hphp/util/numa.h"
34 #include "hphp/util/portability.h"
35 #include "hphp/util/ringbuffer.h"
36 #include "hphp/util/text-util.h"
37 #include "hphp/util/trace.h"
39 #include "hphp/system/systemlib.h"
41 #include "hphp/runtime/base/apc-stats.h"
42 #include "hphp/runtime/base/apc-typed-value.h"
43 #include "hphp/runtime/base/array-init.h"
44 #include "hphp/runtime/base/array-provenance.h"
45 #include "hphp/runtime/base/code-coverage.h"
46 #include "hphp/runtime/base/collections.h"
47 #include "hphp/runtime/base/container-functions.h"
48 #include "hphp/runtime/base/enum-util.h"
49 #include "hphp/runtime/base/execution-context.h"
50 #include "hphp/runtime/base/externals.h"
51 #include "hphp/runtime/base/hhprof.h"
52 #include "hphp/runtime/base/memory-manager.h"
53 #include "hphp/runtime/base/mixed-array.h"
54 #include "hphp/runtime/base/object-data.h"
55 #include "hphp/runtime/base/set-array.h"
56 #include "hphp/runtime/base/program-functions.h"
57 #include "hphp/runtime/base/rds.h"
58 #include "hphp/runtime/base/repo-auth-type-codec.h"
59 #include "hphp/runtime/base/runtime-error.h"
60 #include "hphp/runtime/base/runtime-option.h"
61 #include "hphp/runtime/base/stat-cache.h"
62 #include "hphp/runtime/base/stats.h"
63 #include "hphp/runtime/base/strings.h"
64 #include "hphp/runtime/base/type-structure.h"
65 #include "hphp/runtime/base/type-structure-helpers.h"
66 #include "hphp/runtime/base/type-structure-helpers-defs.h"
67 #include "hphp/runtime/base/tv-arith.h"
68 #include "hphp/runtime/base/tv-comparisons.h"
69 #include "hphp/runtime/base/tv-conversions.h"
70 #include "hphp/runtime/base/tv-refcount.h"
71 #include "hphp/runtime/base/tv-type.h"
72 #include "hphp/runtime/base/unit-cache.h"
74 #include "hphp/runtime/ext/array/ext_array.h"
75 #include "hphp/runtime/ext/asio/ext_await-all-wait-handle.h"
76 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
77 #include "hphp/runtime/ext/asio/ext_async-generator-wait-handle.h"
78 #include "hphp/runtime/ext/asio/ext_async-generator.h"
79 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
80 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
81 #include "hphp/runtime/ext/asio/ext_waitable-wait-handle.h"
82 #include "hphp/runtime/ext/std/ext_std_closure.h"
83 #include "hphp/runtime/ext/extension.h"
84 #include "hphp/runtime/ext/generator/ext_generator.h"
85 #include "hphp/runtime/ext/hh/ext_hh.h"
86 #include "hphp/runtime/ext/reflection/ext_reflection.h"
87 #include "hphp/runtime/ext/std/ext_std_variable.h"
88 #include "hphp/runtime/ext/string/ext_string.h"
89 #include "hphp/runtime/ext/hash/hash_murmur.h"
90 #include "hphp/runtime/ext/json/JSON_parser.h"
92 #include "hphp/runtime/server/rpc-request-handler.h"
93 #include "hphp/runtime/server/source-root-info.h"
95 #include "hphp/runtime/vm/act-rec-defs.h"
96 #include "hphp/runtime/vm/act-rec.h"
97 #include "hphp/runtime/vm/class.h"
98 #include "hphp/runtime/vm/class-meth-data-ref.h"
99 #include "hphp/runtime/vm/debug/debug.h"
100 #include "hphp/runtime/vm/debugger-hook.h"
101 #include "hphp/runtime/vm/event-hook.h"
102 #include "hphp/runtime/ext/functioncredential/ext_functioncredential.h"
103 #include "hphp/runtime/vm/globals-array.h"
104 #include "hphp/runtime/vm/hh-utils.h"
105 #include "hphp/runtime/vm/hhbc-codec.h"
106 #include "hphp/runtime/vm/hhbc.h"
107 #include "hphp/runtime/vm/interp-helpers.h"
108 #include "hphp/runtime/vm/member-operations.h"
109 #include "hphp/runtime/vm/memo-cache.h"
110 #include "hphp/runtime/vm/method-lookup.h"
111 #include "hphp/runtime/vm/native.h"
112 #include "hphp/runtime/vm/reified-generics.h"
113 #include "hphp/runtime/vm/repo-global-data.h"
114 #include "hphp/runtime/vm/repo.h"
115 #include "hphp/runtime/vm/resumable.h"
116 #include "hphp/runtime/vm/runtime.h"
117 #include "hphp/runtime/vm/srckey.h"
118 #include "hphp/runtime/vm/type-constraint.h"
119 #include "hphp/runtime/vm/type-profile.h"
120 #include "hphp/runtime/vm/unwind.h"
121 #include "hphp/runtime/vm/workload-stats.h"
123 #include "hphp/runtime/vm/jit/code-cache.h"
124 #include "hphp/runtime/vm/jit/debugger.h"
125 #include "hphp/runtime/vm/jit/enter-tc.h"
126 #include "hphp/runtime/vm/jit/perf-counters.h"
127 #include "hphp/runtime/vm/jit/tc.h"
128 #include "hphp/runtime/vm/jit/translator-inline.h"
129 #include "hphp/runtime/vm/jit/translator-runtime.h"
130 #include "hphp/runtime/vm/jit/translator.h"
131 #include "hphp/runtime/vm/jit/unwind-itanium.h"
namespace HPHP {

TRACE_SET_MOD(bcinterp);

// RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
// to be closer to other bytecode.cpp data.
bool RuntimeOption::RepoAuthoritative = false;

using jit::TCA;

// GCC 4.8 has some real problems with all the inlining in this file, so don't
// go overboard with that version.  Debug builds also skip forced inlining so
// stack traces stay readable.
#if !defined(NDEBUG) || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 8))
#define OPTBLD_INLINE
#define OPTBLD_FLT_INLINE
#else
#define OPTBLD_INLINE ALWAYS_INLINE
#define OPTBLD_FLT_INLINE INLINE_FLATTEN
#endif
154 Class* arGetContextClass(const ActRec* ar) {
155 if (ar == nullptr) {
156 return nullptr;
158 return ar->m_func->cls();
// Free every local of the frame without running the user-visible
// function-exit hook (for teardown paths where the hook must not fire).
void frame_free_locals_no_hook(ActRec* fp) {
  frame_free_locals_inl_no_hook(fp, fp->func()->numLocals());
}

// Keys for file/line pairs in error-location arrays.
const StaticString s_file("file");
const StaticString s_line("line");
168 ///////////////////////////////////////////////////////////////////////////////
170 //=============================================================================
171 // Miscellaneous decoders.
// Human-readable names for built-in immediate operand types, used by the
// decode tracing in peek<T>().
inline const char* prettytype(int) {
  return "int";
}
inline const char* prettytype(long) {
  return "long";
}
inline const char* prettytype(long long) {
  return "long long";
}
inline const char* prettytype(double) {
  return "double";
}
inline const char* prettytype(unsigned) {
  return "unsigned";
}
178 inline const char* prettytype(OODeclExistsOp) { return "OpDeclExistsOp"; }
179 inline const char* prettytype(FatalOp) { return "FatalOp"; }
180 inline const char* prettytype(IsTypeOp) { return "IsTypeOp"; }
181 inline const char* prettytype(SetOpOp) { return "SetOpOp"; }
182 inline const char* prettytype(IncDecOp) { return "IncDecOp"; }
183 inline const char* prettytype(ObjMethodOp) { return "ObjMethodOp"; }
184 inline const char* prettytype(BareThisOp) { return "BareThisOp"; }
185 inline const char* prettytype(InitPropOp) { return "InitPropOp"; }
186 inline const char* prettytype(SilenceOp) { return "SilenceOp"; }
187 inline const char* prettytype(SwitchKind) { return "SwitchKind"; }
188 inline const char* prettytype(MOpMode) { return "MOpMode"; }
189 inline const char* prettytype(QueryMOp) { return "QueryMOp"; }
190 inline const char* prettytype(SetRangeOp) { return "SetRangeOp"; }
191 inline const char* prettytype(TypeStructResolveOp) {
192 return "TypeStructResolveOp";
194 inline const char* prettytype(CudOp) { return "CudOp"; }
195 inline const char* prettytype(ContCheckOp) { return "ContCheckOp"; }
196 inline const char* prettytype(SpecialClsRef) { return "SpecialClsRef"; }
197 inline const char* prettytype(CollectionType) { return "CollectionType"; }
// load a T value from *pc without incrementing
template<class T> T peek(PC pc) {
  T v;
  // memcpy rather than a direct load: bytecode immediates have no
  // alignment guarantee.
  std::memcpy(&v, pc, sizeof v);
  TRACE(2, "decode: Immediate %s %" PRIi64"\n", prettytype(v), int64_t(v));
  return v;
}

// Load a T value from *pc and advance pc past it.
template<class T> T decode(PC& pc) {
  auto v = peek<T>(pc);
  pc += sizeof(T);
  return v;
}
213 inline const StringData* decode_litstr(PC& pc) {
214 auto id = decode<Id>(pc);
215 return liveUnit()->lookupLitstrId(id);
218 inline const ArrayData* decode_litarr(PC& pc) {
219 return liveUnit()->lookupArrayId(decode<Id>(pc));
namespace {

// wrapper for local variable LA operand
struct local_var {
  TypedValue* ptr;   // address of the local's slot in the current frame
  int32_t index;     // the local's id within the function
  TypedValue* operator->() const { return ptr; }
  TypedValue& operator*() const { return *ptr; }
};

// wrapper to handle unaligned access to variadic immediates
template<class T> struct imm_array {
  uint32_t const size;  // element count
  PC const ptr;         // start of the packed elements in the bytecode

  explicit imm_array(uint32_t size, PC pc)
    : size{size}
    , ptr{pc}
  {}

  // Copy out element i via memcpy; the immediate stream gives no
  // alignment guarantee for T.
  T operator[](uint32_t i) const {
    T e;
    memcpy(&e, ptr + i * sizeof(T), sizeof(T));
    return e;
  }
};

}
// Decode a local-variable operand and return its frame slot plus index.
ALWAYS_INLINE local_var decode_local(PC& pc) {
  auto la = decode_iva(pc);
  assertx(la < vmfp()->m_func->numLocals());
  return local_var{frame_local(vmfp(), la), safe_cast<int32_t>(la)};
}

// Decode an iterator operand and return the frame's iterator slot.
ALWAYS_INLINE Iter* decode_iter(PC& pc) {
  auto ia = decode_iva(pc);
  return frame_iter(vmfp(), ia);
}

// Decode a variadic immediate: a count followed by `size` packed Ts.
// Advances pc past the whole list and returns an unaligned-access wrapper.
template<typename T>
OPTBLD_INLINE imm_array<T> decode_imm_array(PC& pc) {
  auto const size = decode_iva(pc);
  auto const arr_pc = pc;
  pc += size * sizeof(T);
  return imm_array<T>{size, arr_pc};
}

OPTBLD_INLINE IterTable decode_iter_table(PC& pc) {
  return iterTableFromStream(pc);
}

// Decode a repo-auth-type immediate. Debug builds fully decode it for
// verification; release builds just skip its encoded bytes.
OPTBLD_INLINE RepoAuthType decode_rat(PC& pc) {
  if (debug) return decodeRAT(liveUnit(), pc);

  pc += encodedRATSize(pc);
  return RepoAuthType{};
}
281 //=============================================================================
282 // Miscellaneous helpers.
284 static inline Class* frameStaticClass(ActRec* fp) {
285 if (!fp->func()->cls()) return nullptr;
286 if (fp->hasThis()) {
287 return fp->getThis()->getVMClass();
289 return fp->getClass();
292 //=============================================================================
293 // VarEnv.
const StaticString s_GLOBALS("GLOBALS");

// Create the one global-scope VarEnv for this request.
void VarEnv::createGlobal() {
  assertx(!g_context->m_globalVarEnv);
  g_context->m_globalVarEnv = req::make_raw<VarEnv>();
}
// Global-scope constructor: builds the name/value table backing globals and
// seeds it with a self-referential $GLOBALS entry backed by GlobalsArray.
VarEnv::VarEnv()
  : m_nvTable()
  , m_extraArgs(nullptr)
  , m_depth(0)
  , m_global(true)
{
  TRACE(3, "Creating VarEnv %p [global scope]\n", this);
  auto globals_var = Variant::attach(
    // Placement-new into request-heap storage; GlobalsArray views m_nvTable.
    new (tl_heap->objMalloc(sizeof(GlobalsArray))) GlobalsArray(&m_nvTable)
  );
  m_nvTable.set(s_GLOBALS.get(), globals_var.asTypedValue());
}
// Local-scope constructor: lazily attach a VarEnv to a live frame, adopting
// the frame's extra (variadic) args. Only legal for AttrMayUseVV functions.
VarEnv::VarEnv(ActRec* fp, ExtraArgs* eArgs)
  : m_nvTable(fp)
  , m_extraArgs(eArgs)
  , m_depth(1)
  , m_global(false)
{
  assertx(fp->func()->attrs() & AttrMayUseVV);
  TRACE(3, "Creating lazily attached VarEnv %p on stack\n", this);
}
// Clone constructor: duplicate a depth-1, non-global VarEnv (including any
// extra args) onto a new frame.
VarEnv::VarEnv(const VarEnv* varEnv, ActRec* fp)
  : m_nvTable(varEnv->m_nvTable, fp)
  , m_extraArgs(varEnv->m_extraArgs ? varEnv->m_extraArgs->clone(fp) : nullptr)
  , m_depth(1)
  , m_global(false)
{
  assertx(varEnv->m_depth == 1);
  assertx(!varEnv->m_global);
  assertx(fp->func()->attrs() & AttrMayUseVV);

  TRACE(3, "Cloning VarEnv %p to %p\n", varEnv, this);
}
VarEnv::~VarEnv() {
  TRACE(3, "Destroying VarEnv %p [%s]\n",
        this,
        isGlobalScope() ? "global scope" : "local scope");
  assertx(isGlobalScope() == (g_context->m_globalVarEnv == this));

  if (isGlobalScope()) {
    /*
     * When detaching the global scope, we leak any live objects (and
     * let MemoryManager clean them up). This is because we're
     * not supposed to run destructors for objects that are live at
     * the end of a request.
     */
    m_nvTable.unset(s_GLOBALS.get());
    m_nvTable.leak();
  }
  // at this point, m_nvTable is destructed, and GlobalsArray
  // has a dangling pointer to it.
}
// Detach (and possibly free) the frame's VarEnv as the frame is torn down.
void VarEnv::deallocate(ActRec* fp) {
  fp->m_varEnv->exitFP(fp);
}

// Allocate a local VarEnv for fp, adopting fp's existing extra args.
VarEnv* VarEnv::createLocal(ActRec* fp) {
  return req::make_raw<VarEnv>(fp, fp->getExtraArgs());
}

// Deep-copy this VarEnv onto a new frame.
VarEnv* VarEnv::clone(ActRec* fp) const {
  return req::make_raw<VarEnv>(this, fp);
}

// Move the table's frame binding from oldFP to the suspended frame newFP
// (delegates to NameValueTable::suspend).
void VarEnv::suspend(const ActRec* oldFP, ActRec* newFP) {
  m_nvTable.suspend(oldFP, newFP);
}
// Attach this VarEnv to newFP, detaching from oldFP if one is given.
// oldFP == nullptr only for the initial attach of the global scope.
void VarEnv::enterFP(ActRec* oldFP, ActRec* newFP) {
  TRACE(3, "Attaching VarEnv %p [%s] %d fp @%p\n",
        this,
        isGlobalScope() ? "global scope" : "local scope",
        int(newFP->m_func->numNamedLocals()), newFP);
  assertx(newFP);
  if (oldFP == nullptr) {
    assertx(isGlobalScope() && m_depth == 0);
  } else {
    assertx(m_depth >= 1);
    assertx(g_context->getPrevVMStateSkipFrame(newFP) == oldFP);
    if (debug) {
      // Sanity check: no intervening frame between newFP and oldFP may own
      // a VarEnv of its own.
      auto prev = newFP;
      while (true) {
        prev = g_context->getPrevVMState(prev);
        if (prev == oldFP) break;
        assertx(!(prev->m_func->attrs() & AttrMayUseVV) || !prev->hasVarEnv());
      }
    }
    m_nvTable.detach(oldFP);
  }

  assertx(newFP->func()->attrs() & AttrMayUseVV);
  m_nvTable.attach(newFP);
  m_depth++;
}
// Detach this VarEnv from fp. At depth zero the environment (and any extra
// args) dies with the frame, except for the global scope which persists;
// otherwise the table is re-attached to the nearest enclosing frame that
// shares this VarEnv.
void VarEnv::exitFP(ActRec* fp) {
  TRACE(3, "Detaching VarEnv %p [%s] @%p\n",
        this,
        isGlobalScope() ? "global scope" : "local scope",
        fp);
  assertx(fp);
  assertx(m_depth > 0);

  m_depth--;
  m_nvTable.detach(fp);

  if (m_depth == 0) {
    if (m_extraArgs) {
      assertx(!isGlobalScope());
      const auto numExtra = fp->numArgs() - fp->m_func->numNonVariadicParams();
      ExtraArgs::deallocate(m_extraArgs, numExtra);
    }

    // don't free global VarEnv
    if (!isGlobalScope()) {
      req::destroy_raw(this);
    }
  } else {
    // Walk back through the VM states to find the previous frame that owns
    // this VarEnv and re-attach the table there.
    while (true) {
      auto const prevFP = g_context->getPrevVMState(fp);
      if (prevFP->func()->attrs() & AttrMayUseVV &&
          prevFP->m_varEnv == this) {
        m_nvTable.attach(prevFP);
        break;
      }
      fp = prevFP;
    }
  }
}
// Bind name to a copy of tv in this environment.
void VarEnv::set(const StringData* name, tv_rval tv) {
  m_nvTable.set(name, tv);
}

// Look up name; returns nullptr when not bound (per NameValueTable::lookup).
TypedValue* VarEnv::lookup(const StringData* name) {
  return m_nvTable.lookup(name);
}

// Look up name, creating a binding if none exists yet.
TypedValue* VarEnv::lookupAdd(const StringData* name) {
  return m_nvTable.lookupAdd(name);
}

// Remove name's binding. Always reports success, even if it wasn't bound.
bool VarEnv::unset(const StringData* name) {
  m_nvTable.unset(name);
  return true;
}
const StaticString s_closure_var("0Closure");
const StaticString s_reified_generics_var("0ReifiedGenerics");

// Snapshot every user-visible variable in this environment into an array
// sorted by name (so the result is independent of hash-table iteration
// order). Internal 0Closure / 0ReifiedGenerics slots are skipped.
Array VarEnv::getDefinedVariables() const {
  Array ret = Array::Create();

  NameValueTable::Iterator iter(&m_nvTable);
  for (; iter.valid(); iter.next()) {
    auto const sd = iter.curKey();
    auto const tv = iter.curVal();
    // Closures have an internal 0Closure variable
    // Reified functions have an internal 0ReifiedGenerics variable
    if (s_closure_var.equal(sd) || s_reified_generics_var.equal(sd)) {
      continue;
    }
    if (tvAsCVarRef(tv).isReferenced()) {
      ret.setWithRef(StrNR(sd).asString(), tvAsCVarRef(tv));
    } else {
      ret.set(StrNR(sd).asString(), tvAsCVarRef(tv));
    }
  }
  {
    // Make result independent of the hashtable implementation.
    ArrayData* sorted = ret->escalateForSort(SORTFUNC_KSORT);
    assertx(sorted == ret.get() ||
            sorted->empty() ||
            sorted->hasExactlyOneRef());
    SCOPE_EXIT {
      // Adopt the escalated array (if any) once the sort has completed.
      if (sorted != ret.get()) {
        ret = Array::attach(sorted);
      }
    };
    sorted->ksort(0, true);
  }
  return ret;
}
// Fetch the argInd'th extra (beyond-declared) argument of this environment.
TypedValue* VarEnv::getExtraArg(unsigned argInd) const {
  return m_extraArgs->getExtraArg(argInd);
}
494 //=============================================================================
ExtraArgs::ExtraArgs() {}
ExtraArgs::~ExtraArgs() {}

// Allocate request-heap storage for an ExtraArgs header immediately
// followed by nargs TypedValue slots (type-scanned as such).
void* ExtraArgs::allocMem(unsigned nargs) {
  assertx(nargs > 0);
  return req::malloc(
    sizeof(TypedValue) * nargs + sizeof(ExtraArgs),
    type_scan::getIndexForMalloc<
      ExtraArgs,
      type_scan::Action::WithSuffix<TypedValue>
    >()
  );
}
// Copy nargs values from the VM stack into a fresh ExtraArgs.
ExtraArgs* ExtraArgs::allocateCopy(TypedValue* args, unsigned nargs) {
  void* mem = allocMem(nargs);
  ExtraArgs* ea = new (mem) ExtraArgs();

  /*
   * The stack grows downward, so the args in memory are "backward"; i.e. the
   * leftmost (in PHP) extra arg is highest in memory.
   */
  std::reverse_copy(args, args + nargs, &ea->m_extraArgs[0]);
  return ea;
}

// Allocate an ExtraArgs whose value slots are left uninitialized; the
// caller is responsible for filling them.
ExtraArgs* ExtraArgs::allocateUninit(unsigned nargs) {
  void* mem = ExtraArgs::allocMem(nargs);
  return new (mem) ExtraArgs();
}
// Decref each stored value, then free the allocation.
void ExtraArgs::deallocate(ExtraArgs* ea, unsigned nargs) {
  assertx(nargs > 0);
  for (unsigned i = 0; i < nargs; ++i) {
    tvDecRefGen(ea->m_extraArgs + i);
  }
  deallocateRaw(ea);
}

// Convenience overload: derive the extra-arg count from the frame itself.
void ExtraArgs::deallocate(ActRec* ar) {
  const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
  deallocate(ar->getExtraArgs(), numExtra);
}

// Free the allocation without decref'ing the stored values.
void ExtraArgs::deallocateRaw(ExtraArgs* ea) {
  ea->~ExtraArgs();
  req::free(ea);
}
// Duplicate this ExtraArgs for the cloned frame ar.
// NOTE(review): the count here uses numParams(), while the deallocate
// paths use numNonVariadicParams() — presumably equivalent for the frames
// that reach clone(); confirm for variadic-capture functions.
ExtraArgs* ExtraArgs::clone(ActRec* ar) const {
  const int numExtra = ar->numArgs() - ar->m_func->numParams();
  auto ret = allocateUninit(numExtra);
  for (int i = 0; i < numExtra; ++i) {
    tvDupWithRef(m_extraArgs[i], ret->m_extraArgs[i]);
  }
  return ret;
}

TypedValue* ExtraArgs::getExtraArg(unsigned argInd) const {
  return const_cast<TypedValue*>(&m_extraArgs[argInd]);
}
558 //=============================================================================
559 // Stack.
561 // Store actual stack elements array in a thread-local in order to amortize the
562 // cost of allocation.
// Store actual stack elements array in a thread-local in order to amortize the
// cost of allocation.
struct StackElms {
  ~StackElms() { free(m_elms); }
  // Lazily allocate the VM stack storage. The buffer is aligned to its own
  // size so that all valid stack addresses share their high-order bits
  // (relied upon by Stack::wouldOverflow).
  TypedValue* elms() {
    if (m_elms == nullptr) {
      // RuntimeOption::EvalVMStackElms-sized and -aligned.
      size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
      if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
        throw std::runtime_error(
          std::string("VM stack initialization failed: ") +
          folly::errnoStr(errno).c_str());
      }
      madvise(m_elms, algnSz, MADV_DONTNEED);
    }
    return m_elms;
  }
  // Return the physical pages to the OS between requests while keeping the
  // virtual mapping (and alignment) intact.
  void flush() {
    if (m_elms != nullptr) {
      size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
      madvise(m_elms, algnSz, MADV_DONTNEED);
    }
  }
private:
  TypedValue* m_elms{nullptr};
};
THREAD_LOCAL_FLAT(StackElms, t_se);
// Size of the surprise page reserved at the bottom of each VM stack.
const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
// We reserve the bottom page of each stack for use as the surprise
// page, so the minimum useful stack size is the next power of two.
const uint32_t Stack::sMinStackElms =
  2 * sSurprisePageSize / sizeof(TypedValue);
// Validate EvalVMStackElms at startup: it must be at least sMinStackElms
// and a power of two (the size-alignment trick in StackElms requires it).
void Stack::ValidateStackSize() {
  if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
    throw std::runtime_error(folly::sformat(
      "VM stack size of {:#x} is below the minimum of {:#x}",
      RuntimeOption::EvalVMStackElms,
      sMinStackElms
    ));
  }
  if (!folly::isPowTwo(RuntimeOption::EvalVMStackElms)) {
    throw std::runtime_error(folly::sformat(
      "VM stack size of {:#x} is not a power of 2",
      RuntimeOption::EvalVMStackElms
    ));
  }
}
// Stack starts unallocated; storage is bound in requestInit().
Stack::Stack()
  : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
}

Stack::~Stack() {
  requestExit();
}
// Bind the thread-local stack storage to this Stack at request start and
// publish the stack-limit/surprise word into RDS.
void Stack::requestInit() {
  m_elms = t_se->elms();
  // Burn one element of the stack, to satisfy the constraint that
  // valid m_top values always have the same high-order (>
  // log(RuntimeOption::EvalVMStackElms)) bits.
  m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;

  rds::header()->stackLimitAndSurprise.store(
    reinterpret_cast<uintptr_t>(
      reinterpret_cast<char*>(m_elms) + sSurprisePageSize +
        kStackCheckPadding * sizeof(Cell)
    ),
    std::memory_order_release
  );
  assertx(!(rds::header()->stackLimitAndSurprise.load() & kSurpriseFlagMask));

  // Because of the surprise page at the bottom of the stack we lose an
  // additional 256 elements which must be taken into account when checking for
  // overflow.
  UNUSED size_t maxelms =
    RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
  assertx(!wouldOverflow(maxelms - 1));
  assertx(wouldOverflow(maxelms));
}
// Unbind the storage pointer; the backing buffer stays in the thread-local
// StackElms for reuse by the next request.
void Stack::requestExit() {
  m_elms = nullptr;
}
// Flush per-thread VM state between requests so memory can be released.
void flush_evaluation_stack() {
  if (vmStack().isAllocated()) {
    // For RPCRequestHandler threads, the ExecutionContext can stay
    // alive across requests, but its always ok to kill it between
    // requests, so do so now
    RPCRequestHandler::cleanupState();
  }

  tl_heap->flush();

  if (!t_se.isNull()) {
    t_se->flush();
  }
  rds::flush();
  json_parser_flush_caches();

  always_assert(tl_heap->empty());
}
/*
 * Render a single TypedValue for debug stack dumps: a category prefix
 * (V: for refs, C: for cells), the refcount, and a short payload summary.
 * Recurses through KindOfRef to print the inner cell.
 */
static std::string toStringElm(const TypedValue* tv) {
  std::ostringstream os;

  // Corrupt type tag: print the raw value rather than crashing the dump.
  if (!isRealType(tv->m_type)) {
    os << " ??? type " << static_cast<data_type_t>(tv->m_type) << "\n";
    return os.str();
  }
  if (isRefcountedType(tv->m_type) &&
      !tv->m_data.pcnt->checkCount()) {
    // OK in the invoking frame when running a destructor.
    os << " ??? inner_count " << tvGetCount(*tv) << " ";
    return os.str();
  }

  // Shared helper: append ":c(<count>)" (or static/uncounted markers).
  auto print_count = [&] {
    if (tv->m_data.pcnt->isStatic()) {
      os << ":c(static)";
    } else if (tv->m_data.pcnt->isUncounted()) {
      os << ":c(uncounted)";
    } else {
      os << ":c(" << tvGetCount(*tv) << ")";
    }
  };

  // First switch: emit the category prefix; refs recurse and return early.
  switch (tv->m_type) {
    case KindOfRef:
      os << "V:(";
      os << "@" << tv->m_data.pref;
      os << toStringElm(tv->m_data.pref->cell());
      os << ")";
      return os.str();
    case KindOfUninit:
    case KindOfNull:
    case KindOfBoolean:
    case KindOfInt64:
    case KindOfDouble:
    case KindOfPersistentString:
    case KindOfString:
    case KindOfPersistentVec:
    case KindOfVec:
    case KindOfPersistentDict:
    case KindOfDict:
    case KindOfPersistentKeyset:
    case KindOfKeyset:
    case KindOfPersistentShape:
    case KindOfShape:
    case KindOfPersistentArray:
    case KindOfArray:
    case KindOfObject:
    case KindOfResource:
    case KindOfFunc:
    case KindOfClass:
    case KindOfClsMeth:
    case KindOfRecord:
      os << "C:";
      break;
  }

  // Second switch: emit the payload; every handled case `continue`s out of
  // the do/while, so falling through to not_reached() flags a missed case.
  do {
    switch (tv->m_type) {
      case KindOfUninit:
        os << "Uninit";
        continue;
      case KindOfNull:
        os << "Null";
        continue;
      case KindOfBoolean:
        os << (tv->m_data.num ? "True" : "False");
        continue;
      case KindOfInt64:
        os << "0x" << std::hex << tv->m_data.num << std::dec;
        continue;
      case KindOfDouble:
        os << tv->m_data.dbl;
        continue;
      case KindOfPersistentString:
      case KindOfString:
        {
          // Strings are escaped and truncated to 128 chars for readability.
          int len = tv->m_data.pstr->size();
          bool truncated = false;
          if (len > 128) {
            len = 128;
            truncated = true;
          }
          os << tv->m_data.pstr;
          print_count();
          os << ":\""
             << escapeStringForCPP(tv->m_data.pstr->data(), len)
             << "\"" << (truncated ? "..." : "");
        }
        continue;
      case KindOfPersistentVec:
      case KindOfVec:
        assertx(tv->m_data.parr->isVecArray());
        assertx(tv->m_data.parr->checkCount());
        os << tv->m_data.parr;
        print_count();
        os << ":Vec";
        continue;
      case KindOfPersistentDict:
      case KindOfDict:
        assertx(tv->m_data.parr->isDict());
        assertx(tv->m_data.parr->checkCount());
        os << tv->m_data.parr;
        print_count();
        os << ":Dict";
        continue;
      case KindOfPersistentKeyset:
      case KindOfKeyset:
        assertx(tv->m_data.parr->isKeyset());
        assertx(tv->m_data.parr->checkCount());
        os << tv->m_data.parr;
        print_count();
        os << ":Keyset";
        continue;
      case KindOfPersistentShape:
      case KindOfShape:
        assertx(tv->m_data.parr->isShape());
        assertx(tv->m_data.parr->checkCount());
        os << tv->m_data.parr;
        print_count();
        os << ":Shape";
        continue;
      case KindOfPersistentArray:
      case KindOfArray:
        assertx(tv->m_data.parr->isPHPArray());
        assertx(tv->m_data.parr->checkCount());
        os << tv->m_data.parr;
        print_count();
        os << ":Array";
        continue;
      case KindOfObject:
        assertx(tv->m_data.pobj->checkCount());
        os << tv->m_data.pobj;
        print_count();
        os << ":Object("
           << tv->m_data.pobj->getClassName().get()->data()
           << ")";
        continue;
      case KindOfRecord:
        assertx(tv->m_data.prec->checkCount());
        os << tv->m_data.prec;
        print_count();
        os << ":Record("
           << tv->m_data.prec->record()->name()->data()
           << ")";
        continue;
      case KindOfResource:
        assertx(tv->m_data.pres->checkCount());
        os << tv->m_data.pres;
        print_count();
        os << ":Resource("
           << tv->m_data.pres->data()->o_getClassName().get()->data()
           << ")";
        continue;
      case KindOfFunc:
        os << ":Func("
           << tv->m_data.pfunc->fullDisplayName()->data()
           << ")";
        continue;
      case KindOfClass:
        os << ":Class("
           << tv->m_data.pclass->name()->data()
           << ")";
        continue;
      case KindOfClsMeth:
        os << ":ClsMeth("
           << tv->m_data.pclsmeth->getCls()->name()->data()
           << ", "
           << tv->m_data.pclsmeth->getFunc()->fullDisplayName()->data()
           << ")";
        continue;

      case KindOfRef:
        // Handled (with early return) by the first switch above.
        break;
    }
    not_reached();
  } while (0);

  return os.str();
}
849 static std::string toStringIter(const Iter* it) {
850 switch (it->arr().getIterType()) {
851 case ArrayIter::TypeUndefined:
852 return "I:Undefined";
853 case ArrayIter::TypeArray:
854 return "I:Array";
855 case ArrayIter::TypeIterator:
856 return "I:Iterator";
858 assertx(false);
859 return "I:?";
863 * Return true if Offset o is inside the protected region of a fault
864 * funclet for iterId, otherwise false.
866 static bool checkIterScope(const Func* f, Offset o, Id iterId) {
867 assertx(o >= f->base() && o < f->past());
868 for (auto const& eh : f->ehtab()) {
869 if (eh.m_base <= o && o < eh.m_past &&
870 eh.m_iterId == iterId) {
871 return true;
874 return false;
// Append a debug rendering of one VM frame (and, recursively, its callers)
// to os: the function header, locals, iterators, and evaluation stack.
static void toStringFrame(std::ostream& os, const ActRec* fp,
                          int offset, const TypedValue* ftop,
                          const std::string& prefix, bool isTop = true) {
  assertx(fp);

  // Use depth-first recursion to output the most deeply nested stack frame
  // first.
  Offset prevPc = 0;
  TypedValue* prevStackTop = nullptr;
  ActRec* prevFp = g_context->getPrevVMState(fp, &prevPc, &prevStackTop);
  if (prevFp != nullptr) {
    toStringFrame(os, prevFp, prevPc, prevStackTop, prefix, false);
  }

  os << prefix;
  const Func* func = fp->m_func;
  assertx(func);
  func->validate();
  std::string funcName(func->fullName()->data());
  os << "{func:" << funcName
     << ",callOff:" << fp->m_callOff
     << ",this:0x"
     << std::hex << (func->cls() && fp->hasThis() ? fp->getThis() : nullptr)
     << std::dec << "}";
  // Locals sit immediately below the ActRec in memory.
  TypedValue* tv = (TypedValue*)fp;
  tv--;

  if (func->numLocals() > 0) {
    // Don't print locals for parent frames on a Ret(C|V) since some of them
    // may already be destructed.
    if (isRet(func->unit()->getOp(offset)) && !isTop) {
      os << "<locals destroyed>";
    } else {
      os << "<";
      int n = func->numLocals();
      for (int i = 0; i < n; i++, tv--) {
        if (i > 0) {
          os << " ";
        }
        os << toStringElm(tv);
      }
      os << ">";
    }
  }

  if (func->numIterators() > 0) {
    os << "|";
    // Iterators follow the locals, growing downward as well.
    Iter* it = &((Iter*)&tv[1])[-1];
    for (int i = 0; i < func->numIterators(); i++, it--) {
      if (i > 0) {
        os << " ";
      }
      // Only iterators live at this offset are safe to render.
      if (checkIterScope(func, offset, i)) {
        os << toStringIter(it);
      } else {
        os << "I:Undefined";
      }
    }
    os << "|";
  }

  std::vector<std::string> stackElems;
  visitStackElems(
    fp, ftop, offset,
    [&](const ActRec* ar) {
      stackElems.push_back(
        folly::format("{{func:{}}}", ar->m_func->fullName()->data()).str()
      );
    },
    [&](const TypedValue* tv) {
      stackElems.push_back(toStringElm(tv));
    }
  );
  // visitStackElems walks top-down; reverse so output reads bottom-up.
  std::reverse(stackElems.begin(), stackElems.end());
  os << ' ' << folly::join(' ', stackElems);

  os << '\n';
}
// Render the whole VM stack, starting from frame fp, for debugging.
std::string Stack::toString(const ActRec* fp, int offset,
                            const std::string prefix/* = "" */) const {
  // The only way to figure out which stack elements are activation records is
  // to follow the frame chain. However, the goal for each stack frame is to
  // print stack fragments from deepest to shallowest -- a then b in the
  // following example:
  //
  //   {func:foo,callOff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
  //                              aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
  //
  // Use depth-first recursion to get the output order correct.

  std::ostringstream os;
  auto unit = fp->unit();
  auto func = fp->func();
  os << prefix << "=== Stack at "
     << unit->filepath()->data() << ":"
     << unit->getLineNumber(unit->offsetOf(vmpc()))
     << " func " << func->fullName()->data() << " ===\n";

  toStringFrame(os, fp, offset, m_top, prefix);

  return os.str();
}
// Return true if pushing numCells more cells would run into the surprise
// page at the bottom of the stack.
bool Stack::wouldOverflow(int numCells) const {
  // The funny approach here is to validate the translator's assembly
  // technique. We've aligned and sized the stack so that the high order
  // bits of valid cells are all the same. In the translator, numCells
  // can be hardcoded, and m_top is wired into a register,
  // so the expression requires no loads.
  intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
  // Masking with (size - 1) works because EvalVMStackElms is a power of two
  // and the buffer is aligned to its own size (see StackElms::elms).
  truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
  intptr_t diff = truncatedTop - numCells -
    sSurprisePageSize / sizeof(TypedValue);
  return diff < 0;
}
996 TypedValue* Stack::anyFrameStackBase(const ActRec* fp) {
997 return fp->resumed() ? Stack::resumableStackBase(fp)
998 : Stack::frameStackBase(fp);
// Stack base of a non-resumed frame: directly below its locals/iterators.
TypedValue* Stack::frameStackBase(const ActRec* fp) {
  assertx(!fp->resumed());
  return (TypedValue*)fp - fp->func()->numSlotsInFrame();
}
// Stack base of a resumed (generator / async) frame, whose ActRec lives on
// the heap rather than on the VM stack.
TypedValue* Stack::resumableStackBase(const ActRec* fp) {
  assertx(fp->resumed());
  auto sfp = fp->sfp();
  if (sfp) {
    // The non-reentrant case occurs when a non-async or async generator is
    // resumed via ContEnter or ContRaise opcode. These opcodes leave a single
    // value on the stack that becomes part of the generator's stack. So we
    // find the caller's FP, compensate for its locals and iterators, and then
    // we've found the base of the generator's stack.
    assertx(fp->func()->isGenerator());

    // Since resumables are stored on the heap, we need to go back in the
    // callstack a bit to find the base of the stack. Unfortunately, due to
    // generator delegation, this can be pretty far back...
    while (sfp->func()->isGenerator()) {
      sfp = sfp->sfp();
    }

    return (TypedValue*)sfp - sfp->func()->numSlotsInFrame();
  } else {
    // The reentrant case occurs when asio scheduler resumes an async function
    // or async generator. We simply use the top of stack of the previous VM
    // frame (since the ActRec, locals, and iters for this frame do not reside
    // on the VM stack).
    assertx(fp->func()->isAsync());
    return g_context.getNoCheck()->m_nestedVMs.back().sp;
  }
}
// Snapshot the defined variables of a frame: via its VarEnv when it has
// one, otherwise by walking the frame's named locals directly. Inlined or
// absent frames yield an empty array.
Array getDefinedVariables(const ActRec* fp) {
  if (UNLIKELY(fp == nullptr || fp->isInlined())) return empty_array();

  if ((fp->func()->attrs() & AttrMayUseVV) && fp->hasVarEnv()) {
    return fp->m_varEnv->getDefinedVariables();
  }
  auto const func = fp->m_func;
  auto const numLocals = func->numNamedLocals();
  ArrayInit ret(numLocals, ArrayInit::Map{});
  for (Id id = 0; id < numLocals; ++id) {
    TypedValue* ptv = frame_local(fp, id);
    // Uninitialized locals are not "defined" and are skipped.
    if (ptv->m_type == KindOfUninit) {
      continue;
    }
    Variant name(func->localVarName(id), Variant::PersistentStrInit{});
    ret.add(name, tvAsVariant(ptv));
  }
  return ret.toArray();
}
// Move the caller-pushed arguments beyond the callee's declared parameters
// off the stack: into an ExtraArgs structure (when the function may use VV)
// and/or into the variadic-capture array. Refcount bookkeeping here is
// delicate — see the inline before/after notes.
NEVER_INLINE
static void shuffleExtraStackArgs(ActRec* ar) {
  const Func* func = ar->m_func;
  assertx(func);

  // the last (variadic) param is included in numParams (since it has a
  // name), but the arg in that slot should be included as the first
  // element of the variadic array
  const auto numArgs = ar->numArgs();
  const auto numVarArgs = numArgs - func->numNonVariadicParams();
  assertx(numVarArgs > 0);

  const auto takesVariadicParam = func->hasVariadicCaptureParam();
  auto& stack = vmStack();
  if (func->attrs() & AttrMayUseVV) {
    auto const tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs;
    ar->setExtraArgs(ExtraArgs::allocateCopy(tvArgs, numVarArgs));
    if (takesVariadicParam) {
      try {
        VArrayInit ai{numVarArgs};
        // Stack args are in reverse (stack grows down); rebuild PHP order.
        for (uint32_t i = 0; i < numVarArgs; ++i) {
          ai.appendWithRef(*(tvArgs + numVarArgs - 1 - i));
        }
        // Remove them from the stack
        stack.ndiscard(numVarArgs);
        if (RuntimeOption::EvalHackArrDVArrs) {
          stack.pushVecNoRc(ai.create());
        } else {
          stack.pushArrayNoRc(ai.create());
        }
        // Before, for each arg: refcount = n + 1 (stack)
        // After, for each arg: refcount = n + 2 (ExtraArgs, varArgsArray)
      } catch (...) {
        // Roll back the ExtraArgs allocation; the stack still owns the args.
        ExtraArgs::deallocateRaw(ar->getExtraArgs());
        ar->resetExtraArgs();
        throw;
      }
    } else {
      // Discard the arguments from the stack; they were all moved
      // into the extra args so we don't decref.
      stack.ndiscard(numVarArgs);
    }
    // leave ar->numArgs reflecting the actual number of args passed
  } else {
    assertx(takesVariadicParam); // called only if extra args are used
    auto tvArgs = reinterpret_cast<TypedValue*>(ar) - numArgs + numVarArgs - 1;
    VArrayInit ai{numVarArgs};
    for (uint32_t i = 0; i < numVarArgs; ++i) {
      ai.appendWithRef(*(tvArgs--));
    }
    // Discard the arguments from the stack
    for (uint32_t i = 0; i < numVarArgs; ++i) stack.popTV();
    if (RuntimeOption::EvalHackArrDVArrs) {
      stack.pushVecNoRc(ai.create());
    } else {
      stack.pushArrayNoRc(ai.create());
    }
    assertx(func->numParams() == (numArgs - numVarArgs + 1));
    // Collapse the count so the variadic array occupies the last param slot.
    ar->setNumArgs(func->numParams());
  }
}
// Rewrite the stack for a magic (__call-style) dispatch: collapse all passed
// arguments into a single varray/vec and leave exactly two arguments on the
// stack — the invoked-method name and the argument array.
1117 static void shuffleMagicArgs(ActRec* ar) {
1118   assertx(ar->magicDispatch());
1120   // We need to put this where the first argument is
1121   auto const invName = ar->clearMagicDispatch();
1122   int const nargs = ar->numArgs();
1124   // We need to make an array containing all the arguments passed by
1125   // the caller and put it where the second argument is.
1126   auto argArray = Array::attach(
1127     [&]{
1128       auto const args = reinterpret_cast<TypedValue*>(ar) - nargs;
1129       if (RuntimeOption::EvalHackArrDVArrs) {
1130         return nargs
1131           ? PackedArray::MakeVec(nargs, args)
1132           : staticEmptyVecArray();
1134       return nargs ? PackedArray::MakeVArray(nargs, args) : staticEmptyVArray();
1138   auto& stack = vmStack();
1139   // Remove the arguments from the stack; they were moved into the
1140   // array so we don't need to decref.
1141   stack.ndiscard(nargs);
1143   // Move invName to where the first argument belongs, no need
1144   // to incRef/decRef since we are transferring ownership
1145   stack.pushStringNoRc(invName);
1147   // Move argArray to where the second argument belongs. We've already
1148   // incReffed the array above so we don't need to do it here.
1149   if (RuntimeOption::EvalHackArrDVArrs) {
1150     stack.pushVecNoRc(argArray.detach());
1151   } else {
1152     stack.pushArrayNoRc(argArray.detach());
// The callee now sees exactly ($name, $args); no VarEnv is attached.
1155   ar->setNumArgs(2);
1156   ar->setVarEnv(nullptr);
1159 // This helper is meant to be called if an exception or invalidation takes
1160 // place in the process of function entry; the ActRec ar is on the stack
1161 // but is not (yet) the current (executing) frame and is followed by a
1162 // number of params
// The param count is taken from (in priority order): *numParams if given,
// else the func's declared param count when extraArgs exist, else
// ar->numArgs(). Pops all params, frees any ExtraArgs, then pops the ActRec.
1163 static NEVER_INLINE void cleanupParamsAndActRec(Stack& stack,
1164                                                 ActRec* ar,
1165                                                 ExtraArgs* extraArgs,
1166                                                 int* numParams) {
1167   assertx(stack.top() + (numParams != nullptr ? (*numParams) :
1168                          extraArgs != nullptr ? ar->m_func->numParams() :
1169                          ar->numArgs())
1170           == (void*)ar);
1171   if (extraArgs) {
1172     const int numExtra = ar->numArgs() - ar->m_func->numNonVariadicParams();
1173     ExtraArgs::deallocate(extraArgs, numExtra);
1175   while (stack.top() != (void*)ar) {
1176     stack.popTV();
1178   stack.popAR();
// Magic-dispatch variant of prepareArrayArgs: `nregular` regular args are on
// the stack and `args` is an unpacked container. Leaves the stack holding the
// invoked-method name plus one array combining both sources of arguments.
1181 static NEVER_INLINE void shuffleMagicArrayArgs(ActRec* ar, const Cell args,
1182                                                Stack& stack, int nregular) {
1183   assertx(ar != nullptr && ar->magicDispatch());
1184   assertx(!cellIsNull(&args));
1185   assertx(nregular >= 0);
1186   assertx((stack.top() + nregular) == (void*) ar);
1187   assertx(isContainer(args));
1188   DEBUG_ONLY const Func* f = ar->m_func;
1189   assertx(f && f->name()->isame(s___call.get()));
1191   // We'll need to make this the first argument
1192   auto const invName = ar->clearMagicDispatch();
1194   auto nargs = getContainerSize(args);
1196   if (UNLIKELY(0 == nargs)) {
1197     // We need to make an array containing all the arguments passed by
1198     // the caller and put it where the second argument is.
1199     auto argArray = Array::attach(
1200       [&]{
1201         auto const args = reinterpret_cast<TypedValue*>(ar) - nregular;
1202         if (RuntimeOption::EvalHackArrDVArrs) {
1203           return nregular
1204             ? PackedArray::MakeVec(nregular, args)
1205             : staticEmptyVecArray();
1207         return nregular
1208           ? PackedArray::MakeVArray(nregular, args)
1209           : staticEmptyVArray();
1213     // Remove the arguments from the stack; they were moved into the
1214     // array so we don't need to decref.
1215     stack.ndiscard(nregular);
1217     // Move invName to where the first argument belongs, no need
1218     // to incRef/decRef since we are transferring ownership
1219     assertx(stack.top() == (void*) ar);
1220     stack.pushStringNoRc(invName);
1222     // Move argArray to where the second argument belongs. We've already
1223     // incReffed the array above so we don't need to do it here.
1224     if (RuntimeOption::EvalHackArrDVArrs) {
1225       stack.pushVecNoRc(argArray.detach());
1226     } else {
1227       stack.pushArrayNoRc(argArray.detach());
1229   } else {
// Fast paths: with no regular args and a dvarray/vec of the right kind, the
// unpacked container can be pushed directly as the argument array.
1230     if (nregular == 0 &&
1231         !RuntimeOption::EvalHackArrDVArrs &&
1232         isArrayType(args.m_type) &&
1233         args.m_data.parr->isVArray()) {
1234       assertx(stack.top() == (void*) ar);
1235       stack.pushStringNoRc(invName);
1236       stack.pushArray(args.m_data.parr);
1237     } else if (nregular == 0 &&
1238                RuntimeOption::EvalHackArrDVArrs &&
1239                isVecType(args.m_type)) {
1240       assertx(stack.top() == (void*) ar);
1241       stack.pushStringNoRc(invName);
1242       stack.pushVec(args.m_data.parr);
1243     } else {
1244       VArrayInit ai(nargs + nregular);
1245       // The arguments are pushed in order, so we should refer them by
1246       // index instead of taking the top, that would lead to reverse order.
1247       for (int i = nregular - 1; i >= 0; --i) {
1248         // appendWithRef bumps the refcount and splits if necessary,
1249         // to compensate for the upcoming pop from the stack
1250         ai.appendWithRef(tvAsVariant(stack.indTV(i)));
1252       for (int i = 0; i < nregular; ++i) {
1253         stack.popTV();
1255       assertx(stack.top() == (void*) ar);
1256       stack.pushStringNoRc(invName);
1257       for (ArrayIter iter(args); iter; ++iter) {
1258         ai.appendWithRef(iter.secondValPlus());
1260       if (RuntimeOption::EvalHackArrDVArrs) {
1261         stack.pushVecNoRc(ai.create());
1262       } else {
1263         stack.pushArrayNoRc(ai.create());
1268   ar->setNumArgs(2);
1269   ar->setVarEnv(nullptr);
1272 // offset is the number of params already on the stack to which the
1273 // contents of args are to be added; for call_user_func_array, this is
1274 // always 0; for unpacked arguments, it may be greater if normally passed
1275 // params precede the unpack.
// Spread the container `args` onto the stack as call arguments for `ar`,
// honoring by-ref annotations (when checkRefAnnot), the callee's variadic
// capture, ExtraArgs (AttrMayUseVV), and discardExtraArgs. Delegates magic
// dispatch to shuffleMagicArrayArgs.
1276 void prepareArrayArgs(ActRec* ar, const Cell args, Stack& stack,
1277                       int nregular, bool checkRefAnnot) {
1278   assertx(!cellIsNull(&args));
1279   assertx(nregular >= 0);
1280   assertx((stack.top() + nregular) == (void*) ar);
1281   const Func* const f = ar->m_func;
1282   assertx(f);
1284   assertx(isContainer(args));
1285   int const nargs = nregular + getContainerSize(args);
1286   if (UNLIKELY(ar->magicDispatch())) {
1287     shuffleMagicArrayArgs(ar, args, stack, nregular);
1288     return;
1291 #define WRAP(e)                                                        \
1292   try {                                                                \
1293     e;                                                                 \
1294   } catch (...) {                                                      \
1295     /* If the user error handler throws an exception, discard the
1296      * uninitialized value(s) at the top of the eval stack so that the
1297      * unwinder doesn't choke */                                       \
1298     stack.discard();                                                   \
1299     throw;                                                             \
1302   int const nparams = f->numNonVariadicParams();
1303   int nextra_regular = std::max(nregular - nparams, 0);
1304   ArrayIter iter(args);
1305   if (LIKELY(nextra_regular == 0)) {
// Copy unpacked values into the remaining non-variadic parameter slots,
// dup'ing refs only when the param is by-ref and the ref is shared.
1306     for (int i = nregular; iter && (i < nparams); ++i, ++iter) {
1307       auto const from = iter.secondValPlus();
1308       TypedValue* to = stack.allocTV();
1309       if (LIKELY(!f->byRef(i))) {
1310         cellDup(tvToCell(from), *to);
1311       } else if (LIKELY(isRefType(from.m_type) &&
1312                         from.m_data.pref->hasMultipleRefs())) {
1313         if (checkRefAnnot) {
1314           WRAP(throwParamRefMismatch(f, i));
1316         refDup(from, *to);
1317       } else {
1318         if (checkRefAnnot) {
1319           WRAP(throwParamRefMismatch(f, i));
1321         cellDup(tvToCell(from), *to);
1325     if (LIKELY(!iter)) {
1326       // argArray was exhausted, so there are no "extra" arguments but there
1327       // may be a deficit of non-variadic arguments, and the need to push an
1328       // empty array for the variadic argument ... that work is left to
1329       // prepareFuncEntry. Since the stack state is going to be considered
1330       // "trimmed" over there, we need to null the extraArgs/varEnv field if
1331       // the function could read it.
1332       ar->setNumArgs(nargs);
1333       ar->trashVarEnv();
1334       if (!debug || (ar->func()->attrs() & AttrMayUseVV)) {
1335         ar->setVarEnv(nullptr);
1337       return;
1341 #undef WRAP
1343   // there are "extra" arguments; passed as standard arguments prior to the
1344   // ... unpack operator and/or still remaining in argArray
1345   assertx(nargs > nparams);
1346   assertx(nextra_regular > 0 || !!iter);
1347   if (LIKELY(f->discardExtraArgs())) {
1348     if (UNLIKELY(nextra_regular > 0)) {
1349       // if unpacking, any regularly passed arguments on the stack
1350       // in excess of those expected by the function need to be discarded
1351       // in addition to the ones held in the arry
1352       do { stack.popTV(); } while (--nextra_regular);
1355     // the extra args are not used in the function; no reason to add them
1356     // to the stack
1357     ar->setNumArgs(f->numParams());
1358     return;
1361   auto const hasVarParam = f->hasVariadicCaptureParam();
1362   auto const extra = nargs - nparams;
1363   if (f->attrs() & AttrMayUseVV) {
1364     ExtraArgs* extraArgs = ExtraArgs::allocateUninit(extra);
1365     VArrayInit ai(extra);
1366     if (UNLIKELY(nextra_regular > 0)) {
1367       // The arguments are pushed in order, so we should refer them by
1368       // index instead of taking the top, that would lead to reverse order.
1369       for (int i = nextra_regular - 1; i >= 0; --i) {
1370         TypedValue* to = extraArgs->getExtraArg(nextra_regular - i - 1);
1371         const TypedValue* from = stack.indTV(i);
1372         if (isRefType(from->m_type) && from->m_data.pref->isReferenced()) {
1373           refCopy(*from, *to);
1374         } else {
1375           cellCopy(*tvToCell(from), *to);
1377         if (hasVarParam) {
1378           // appendWithRef bumps the refcount: this accounts for the fact
1379           // that the extra args values went from being present on the stack
1380           // to being in (both) ExtraArgs and the variadic args
1381           ai.appendWithRef(tvAsCVarRef(from));
1384       stack.ndiscard(nextra_regular);
1386     for (int i = nextra_regular; i < extra; ++i, ++iter) {
1387       TypedValue* to = extraArgs->getExtraArg(i);
1388       auto const from = iter.secondValPlus();
1389       tvDupWithRef(from, *to);
1390       if (hasVarParam) {
1391         ai.appendWithRef(from);
1394     assertx(!iter); // iter should now be exhausted
1395     if (hasVarParam) {
1396       auto const ad = ai.create();
1397       assertx(ad->hasExactlyOneRef());
1398       if (RuntimeOption::EvalHackArrDVArrs) {
1399         stack.pushVecNoRc(ad);
1400       } else {
1401         stack.pushArrayNoRc(ad);
1404     ar->setNumArgs(nargs);
1405     ar->setExtraArgs(extraArgs);
1406   } else {
1407     assertx(hasVarParam);
// Fast paths: push the unpacked container itself as the variadic array when
// its kind already matches and no regular extras precede it.
1408     if (nparams == nregular &&
1409         !RuntimeOption::EvalHackArrDVArrs &&
1410         isArrayType(args.m_type) &&
1411         args.m_data.parr->isVArray()) {
1412       stack.pushArray(args.m_data.parr);
1413     } else if (nparams == nregular &&
1414                RuntimeOption::EvalHackArrDVArrs &&
1415                isVecType(args.m_type)) {
1416       stack.pushVec(args.m_data.parr);
1417     } else {
1418       VArrayInit ai(extra);
1419       if (UNLIKELY(nextra_regular > 0)) {
1420         // The arguments are pushed in order, so we should refer them by
1421         // index instead of taking the top, that would lead to reverse order.
1422         for (int i = nextra_regular - 1; i >= 0; --i) {
1423           // appendWithRef bumps the refcount and splits if necessary,
1424           // to compensate for the upcoming pop from the stack
1425           ai.appendWithRef(tvAsVariant(stack.indTV(i)));
1427         for (int i = 0; i < nextra_regular; ++i) {
1428           stack.popTV();
1431       for (int i = nextra_regular; i < extra; ++i, ++iter) {
1432         // appendWithRef bumps the refcount to compensate for the
1433         // eventual decref of arrayArgs.
1434         ai.appendWithRef(iter.secondValPlus());
1436       assertx(!iter); // iter should now be exhausted
1437       auto const ad = ai.create();
1438       assertx(ad->hasExactlyOneRef());
1439       if (RuntimeOption::EvalHackArrDVArrs) {
1440         stack.pushVecNoRc(ad);
1441       } else {
1442         stack.pushArrayNoRc(ad);
1445   ar->setNumArgs(f->numParams());
// Finish setting up the stack frame for a non-resumed call: normalize the
// argument count (extra args, magic dispatch, missing-arg Uninits, variadic
// capture), push locals/iterators, set vmfp()/vmpc() (jumping to the first
// DV initializer when needed), and raise arg-count warnings last so that the
// frame is in a consistent state if a user error handler throws.
1449 static void prepareFuncEntry(ActRec *ar, StackArgsState stk) {
1450   assertx(!ar->resumed());
1451   const Func* func = ar->m_func;
1452   Offset firstDVInitializer = InvalidAbsoluteOffset;
1453   bool raiseMissingArgumentWarnings = false;
1454   folly::Optional<uint32_t> raiseTooManyArgumentsWarnings;
1455   const int nparams = func->numNonVariadicParams();
1456   auto& stack = vmStack();
1457   ArrayData* reified_generics = nullptr;
1459   if (ar->m_func->hasReifiedGenerics()) {
1460     if (ar->hasReifiedGenerics()) {
1461       // This means that the first local is $0ReifiedGenerics
1462       reified_generics = ar->getReifiedGenerics();
1464     ar->trashReifiedGenerics();
1467   if (stk == StackArgsState::Trimmed &&
1468       (ar->func()->attrs() & AttrMayUseVV) &&
1469       ar->hasExtraArgs()) {
1470     assertx(nparams < ar->numArgs());
1471   } else if (UNLIKELY(ar->magicDispatch())) {
1472     // shuffleMagicArgs deals with everything. no need for further
1473     // argument munging
1474     shuffleMagicArgs(ar);
1475   } else {
1476     int nargs = ar->numArgs();
1477     if (UNLIKELY(nargs > nparams)) {
1478       if (LIKELY(stk != StackArgsState::Trimmed && func->discardExtraArgs())) {
1479         // In the common case, the function won't use the extra arguments,
1480         // so act as if they were never passed (NOTE: this has the effect
1481         // of slightly misleading backtraces that don't reflect the
1482         // discarded args)
1483         for (int i = nparams; i < nargs; ++i) { stack.popTV(); }
1484         ar->setNumArgs(nparams);
1485       } else if (stk == StackArgsState::Trimmed) {
1486         assertx(nargs == func->numParams());
1487         assertx(((TypedValue*)ar - stack.top()) == func->numParams());
1488       } else {
1489         shuffleExtraStackArgs(ar);
1491       raiseTooManyArgumentsWarnings = nargs;
1492     } else {
1493       if (nargs < nparams) {
1494         // Push uninitialized nulls for missing arguments. Some of them may
1495         // end up getting default-initialized, but regardless, we need to
1496         // make space for them on the stack.
1497         const Func::ParamInfoVec& paramInfo = func->params();
1498         for (int i = nargs; i < nparams; ++i) {
1499           stack.pushUninit();
1500           Offset dvInitializer = paramInfo[i].funcletOff;
1501           if (dvInitializer == InvalidAbsoluteOffset) {
1502             // We wait to raise warnings until after all the locals have been
1503             // initialized. This is important because things need to be in a
1504             // consistent state in case the user error handler throws.
1505             raiseMissingArgumentWarnings = true;
1506           } else if (firstDVInitializer == InvalidAbsoluteOffset) {
1507             // This is the first unpassed arg with a default value, so
1508             // this is where we'll need to jump to.
1509             firstDVInitializer = dvInitializer;
1513       if (UNLIKELY(func->hasVariadicCaptureParam())) {
1514         if (RuntimeOption::EvalHackArrDVArrs) {
1515           stack.pushVecNoRc(staticEmptyVecArray());
1516         } else {
1517           stack.pushArrayNoRc(staticEmptyVArray());
1520       if (func->attrs() & AttrMayUseVV) {
1521         ar->setVarEnv(nullptr);
1526   int nlocals = func->numParams();
1527   if (UNLIKELY(func->isClosureBody())) {
1528     int nuse = c_Closure::initActRecFromClosure(ar, stack.top());
1529     // initActRecFromClosure doesn't move stack
1530     stack.nalloc(nuse);
1531     nlocals += nuse;
// initActRecFromClosure may rebind ar's func; refresh our local copy.
1532     func = ar->m_func;
1535   if (ar->m_func->hasReifiedGenerics()) {
1536     // Currently does not work with closures
1537     assertx(!func->isClosureBody());
1538     if (!ar->hasReifiedGenerics()) {
1539       stack.pushUninit();
1540     } else {
1541       assertx(reified_generics != nullptr);
1542       // push for first local
1543       if (RuntimeOption::EvalHackArrDVArrs) {
1544         stack.pushVec(reified_generics);
1545       } else {
1546         stack.pushArray(reified_generics);
1549     nlocals++;
1552   pushFrameSlots(func, nlocals);
1554   vmfp() = ar;
1555   vmpc() = firstDVInitializer != InvalidAbsoluteOffset
1556     ? func->unit()->entry() + firstDVInitializer
1557     : func->getEntry();
1558   vmJitReturnAddr() = nullptr;
1560   // cppext functions/methods have their own logic for raising
1561   // warnings for missing arguments, so we only need to do this work
1562   // for non-cppext functions/methods
1563   if (raiseMissingArgumentWarnings && !func->isCPPBuiltin()) {
1564     HPHP::jit::raiseMissingArgument(func, ar->numArgs());
1566   if (raiseTooManyArgumentsWarnings && !func->isCPPBuiltin()) {
1567     // since shuffleExtraStackArgs changes ar->numArgs() we need to communicate
1568     // the value before it gets changed
1569     HPHP::jit::raiseTooManyArguments(func, *raiseTooManyArgumentsWarnings);
1573 namespace {
1574 // Check whether HasReifiedGenerics is set on the ActRec
1575 // Check whether the location of reified generics matches the one we expect
// raise_error does not return, so a frame lacking required reified generics
// never reaches the mismatch check below.
1576 void checkForReifiedGenericsErrors(const ActRec* ar) {
1577   if (!ar->m_func->hasReifiedGenerics()) return;
1578   if (!ar->hasReifiedGenerics()) {
1579     raise_error(Strings::REIFIED_GENERICS_NOT_GIVEN,
1580                 ar->m_func->fullName()->data());
// The reified generics live in the local slot just past the declared params.
1582   auto const tv = frame_local(ar, ar->m_func->numParams());
1583   assertx(tv && (RuntimeOption::EvalHackArrDVArrs ? tvIsVec(tv)
1584                  : tvIsArray(tv)));
1585   checkFunReifiedGenericMismatch(ar->m_func, tv->m_data.parr);
1587 } // namespace
1589 static void dispatch();
// Enter the VM for a fresh (non-resumed) call to enterFnAr: via a JIT
// prologue when eligible, otherwise via interpreter entry setup (including
// pseudo-main VarEnv wiring), then run the function body in the TC or the
// interpreter.
1591 void enterVMAtFunc(ActRec* enterFnAr, StackArgsState stk, VarEnv* varEnv) {
1592   assertx(enterFnAr);
1593   assertx(!enterFnAr->resumed());
1594   Stats::inc(Stats::VMEnter);
1596   const bool useJit = RID().getJit() && !RID().getJitFolding();
1597   const bool useJitPrologue = useJit && vmfp()
1598     && !enterFnAr->magicDispatch()
1599     && !varEnv
1600     && (stk != StackArgsState::Trimmed);
1601   // The jit prologues only know how to do limited amounts of work; cannot
1602   // be used for magic call/pseudo-main/extra-args already determined or
1603   // ... or if the stack args have been explicitly been prepared (e.g. via
1604   // entry as part of invoke func).
1606   if (LIKELY(useJitPrologue)) {
1607     const int np = enterFnAr->m_func->numNonVariadicParams();
1608     int na = enterFnAr->numArgs();
// Prologues are keyed by arg count, capped at nparams+1 for "too many".
1609     if (na > np) na = np + 1;
1610     jit::TCA start = enterFnAr->m_func->getPrologue(na);
1611     jit::enterTCAtPrologue(enterFnAr, start);
1612     return;
1615   if (UNLIKELY(varEnv != nullptr)) {
1616     enterFnAr->setVarEnv(varEnv);
1617     assertx(enterFnAr->func()->isPseudoMain());
1618     pushFrameSlots(enterFnAr->func());
1619     auto oldFp = vmfp();
1620     if (UNLIKELY(oldFp && oldFp->skipFrame())) {
1621       oldFp = g_context->getPrevVMStateSkipFrame(oldFp);
1623     varEnv->enterFP(oldFp, enterFnAr);
1624     vmfp() = enterFnAr;
1625     vmpc() = enterFnAr->func()->getEntry();
1626   } else {
1627     prepareFuncEntry(enterFnAr, stk);
// An intercepting FunctionCall hook may abort entry entirely.
1630   if (!EventHook::FunctionCall(enterFnAr, EventHook::NormalFunc)) return;
1631   checkStack(vmStack(), enterFnAr->m_func, 0);
1632   checkForReifiedGenericsErrors(enterFnAr);
1633   calleeDynamicCallChecks(enterFnAr);
1634   checkForRequiredCallM(enterFnAr);
1635   assertx(vmfp()->func()->contains(vmpc()));
1637   if (useJit) {
1638     jit::TCA start = enterFnAr->m_func->getFuncBody();
1639     assert_flog(jit::tc::isValidCodeAddress(start),
1640                 "start = {} ; func = {} ({})\n",
1641                 start, enterFnAr->m_func, enterFnAr->m_func->fullName());
1642     jit::enterTCAfterPrologue(start);
1643   } else {
1644     dispatch();
1648 void enterVMAtCurPC() {
1649 assertx(vmfp());
1650 assertx(vmpc());
1651 assertx(vmfp()->func()->contains(vmpc()));
1652 Stats::inc(Stats::VMEnter);
1653 if (RID().getJit()) {
1654 jit::enterTC();
1655 } else {
1656 dispatch();
1661 * Helper for function entry, including pseudo-main entry.
1663 void pushFrameSlots(const Func* func, int nparams /*= 0*/) {
1664 // Push locals.
1665 for (int i = nparams; i < func->numLocals(); i++) {
1666 vmStack().pushUninit();
1668 // Push iterators.
1669 for (int i = 0; i < func->numIterators(); i++) {
1670 vmStack().allocI();
// During unwinding, make sure this frame cannot return directly into jitted
// code: if the saved return address isn't already a return helper, smash it
// to the interpreter return-helper stub matching the frame kind.
1674 void unwindPreventReturnToTC(ActRec* ar) {
1675   auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
1676   always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
1677                      "preventReturnToTC({}): {} isn't in TC",
1678                      ar, savedRip);
1680   if (isReturnHelper(savedRip)) return;
1682   auto& ustubs = jit::tc::ustubs();
1683   if (ar->resumed()) {
1684     // async functions use callToExit stub
1685     assertx(ar->func()->isGenerator());
1686     ar->setJitReturn(ar->func()->isAsync()
1687       ? ustubs.asyncGenRetHelper : ustubs.genRetHelper);
1688   } else {
1689     ar->setJitReturn(ustubs.retHelper);
// Debugger variant of unwindPreventReturnToTC: additionally stashes the
// call's catch block so the return helpers and unwinder can still find it
// after the return address is smashed.
1693 void debuggerPreventReturnToTC(ActRec* ar) {
1694   auto const savedRip = reinterpret_cast<jit::TCA>(ar->m_savedRip);
1695   always_assert_flog(jit::tc::isValidCodeAddress(savedRip),
1696                      "preventReturnToTC({}): {} isn't in TC",
1697                      ar, savedRip);
1699   if (isReturnHelper(savedRip) || isDebuggerReturnHelper(savedRip)) return;
1701   // We're going to smash the return address. Before we do, save the catch
1702   // block attached to the call in a side table so the return helpers and
1703   // unwinder can find it when needed.
1704   jit::stashDebuggerCatch(ar);
1706   auto& ustubs = jit::tc::ustubs();
1707   if (ar->resumed()) {
1708     // async functions use callToExit stub
1709     assertx(ar->func()->isGenerator());
1710     ar->setJitReturn(ar->func()->isAsync()
1711       ? ustubs.debuggerAsyncGenRetHelper : ustubs.debuggerGenRetHelper);
1712   } else {
1713     ar->setJitReturn(ustubs.debuggerRetHelper);
1717 // Walk the stack and find any return address to jitted code and bash it to the
1718 // appropriate RetFromInterpreted*Frame helper. This ensures that we don't
1719 // return into jitted code and gives the system the proper chance to interpret
1720 // blacklisted tracelets.
1721 void debuggerPreventReturnsToTC() {
1722   assertx(isDebuggerAttached());
1723   if (!RuntimeOption::EvalJit) return;
// Walk every VM frame, crossing nested-VM boundaries via getPrevVMState.
1725   auto& ec = *g_context;
1726   for (auto ar = vmfp(); ar; ar = ec.getPrevVMState(ar)) {
1727     debuggerPreventReturnToTC(ar);
1731 static inline StringData* lookup_name(TypedValue* key) {
1732 return prepareKey(*key);
// Look up a global by key in the global VarEnv. Sets `name` to the
// normalized key and `val` to the slot, or nullptr if the global is unset.
1735 static inline void lookup_gbl(ActRec* /*fp*/, StringData*& name,
1736                               TypedValue* key, TypedValue*& val) {
1737   name = lookup_name(key);
1738   assertx(g_context->m_globalVarEnv);
1739   val = g_context->m_globalVarEnv->lookup(name);
// Like lookup_gbl, but defines the global as null first if it doesn't exist,
// so `val` is always non-null on return.
1742 static inline void lookupd_gbl(ActRec* /*fp*/, StringData*& name,
1743                                TypedValue* key, TypedValue*& val) {
1744   name = lookup_name(key);
1745   assertx(g_context->m_globalVarEnv);
1746   VarEnv* varEnv = g_context->m_globalVarEnv;
1747   val = varEnv->lookup(name);
1748   if (val == nullptr) {
1749     TypedValue tv;
1750     tvWriteNull(tv);
1751     varEnv->set(name, &tv);
1752     val = varEnv->lookup(name);
// Resolve a static property of `cls` named by `key`, from the context class
// of frame `fp`. Outputs the normalized name, the value slot (nullptr if not
// visible), its slot index, and visibility/accessibility/constness flags.
1756 static inline void lookup_sprop(ActRec* fp,
1757                                 Class* cls,
1758                                 StringData*& name,
1759                                 TypedValue* key,
1760                                 TypedValue*& val,
1761                                 Slot& slot,
1762                                 bool& visible,
1763                                 bool& accessible,
1764                                 bool& constant,
1765                                 bool ignoreLateInit) {
1766   name = lookup_name(key);
1767   auto const ctx = arGetContextClass(fp);
1769   auto const lookup = ignoreLateInit
1770     ? cls->getSPropIgnoreLateInit(ctx, name)
1771     : cls->getSProp(ctx, name);
1773   val = lookup.val;
1774   slot = lookup.slot;
1775   visible = lookup.val != nullptr;
1776   constant = lookup.constant;
1777   accessible = lookup.accessible;
// Convert a Cell into a Class*: load by name for strings (erroring on an
// unknown class), take the object's class for objects, pass classes through,
// and raise an error for any other type. Never returns nullptr.
1780 static inline Class* lookupClsRef(Cell* input) {
1781   Class* class_ = nullptr;
1782   if (isStringType(input->m_type)) {
1783     class_ = Unit::loadClass(input->m_data.pstr);
1784     if (class_ == nullptr) {
1785       raise_error(Strings::UNKNOWN_CLASS, input->m_data.pstr->data());
1787   } else if (input->m_type == KindOfObject) {
1788     class_ = input->m_data.pobj->getVMClass();
1789   } else if (isClassType(input->m_type)) {
1790     class_ = input->m_data.pclass;
1791   } else {
1792     raise_error("Cls: Expected string or object");
1794   return class_;
1797 static UNUSED int innerCount(TypedValue tv) {
1798 return isRefcountedType(tv.m_type) ? tvGetCount(tv) : -1;
// Rotate the member-instruction "ratchet" registers: retire tvRef2, move
// tvRef into it, and redirect `result` when it pointed at tvRef, so a
// reference acquired during one vector-op iteration survives into the next.
1801 static inline tv_lval ratchetRefs(tv_lval result,
1802                                   TypedValue& tvRef,
1803                                   TypedValue& tvRef2) {
1804   TRACE(5, "Ratchet: result %p(k%d c%d), ref %p(k%d c%d) ref2 %p(k%d c%d)\n",
1805         &val(result), static_cast<data_type_t>(result.type()),
1806         innerCount(*result),
1807         &tvRef, static_cast<data_type_t>(tvRef.m_type), innerCount(tvRef),
1808         &tvRef2, static_cast<data_type_t>(tvRef2.m_type), innerCount(tvRef2));
1809   // Due to complications associated with ArrayAccess, it is possible to acquire
1810   // a reference as a side effect of vector operation processing. Such a
1811   // reference must be retained until after the next iteration is complete.
1812   // Therefore, move the reference from tvRef to tvRef2, so that the reference
1813   // will be released one iteration later. But only do this if tvRef was used in
1814   // this iteration, otherwise we may wipe out the last reference to something
1815   // that we need to stay alive until the next iteration.
1816   if (tvRef.m_type != KindOfUninit) {
1817     if (isRefcountedType(tvRef2.m_type)) {
1818       tvDecRefCountable(&tvRef2);
1819       TRACE(5, "Ratchet: decref tvref2\n");
1820       tvWriteUninit(tvRef2);
1823     memcpy(&tvRef2, &tvRef, sizeof(TypedValue));
1824     tvWriteUninit(tvRef);
1825     // Update result to point to relocated reference. This can be done
1826     // unconditionally here because we maintain the invariant throughout that
1827     // either tvRef is KindOfUninit, or tvRef contains a valid object that
1828     // result points to.
1829     assertx(&val(result) == &tvRef.m_data);
1830     return tv_lval(&tvRef2);
1833   assertx(&val(result) != &tvRef.m_data);
1834   return result;
1838 * One iop* function exists for every bytecode. They all take a single PC&
1839 * argument, which should be left pointing to the next bytecode to execute when
1840 * the instruction is complete. Most return void, though a few return a
1841 * jit::TCA. The ones that return a TCA return a non-nullptr value to indicate
1842 * that the caller must resume execution in the TC at the returned
1843 * address. This is used to maintain certain invariants about how we get into
1844 * and out of VM frames in jitted code; see comments on jitReturnPre() for more
1845 * details.
1848 OPTBLD_INLINE void iopNop() {
1851 OPTBLD_INLINE void iopEntryNop() {
1854 OPTBLD_INLINE void iopPopC() {
1855 vmStack().popC();
1858 OPTBLD_INLINE void iopPopV() {
1859 vmStack().popV();
1862 OPTBLD_INLINE void iopPopU() {
1863 vmStack().popU();
// Remove the Uninit slot directly under the top of the stack: the top Cell
// is copied down one slot and the vacated top slot is discarded.
1866 OPTBLD_INLINE void iopPopU2() {
1867   assertx(vmStack().indC(1)->m_type == KindOfUninit);
1868   *vmStack().indC(1) = *vmStack().topC();
1869   vmStack().discard();
// Remove an un-entered frame (3 Uninit slots) sitting under `nout` output
// values: shift the outputs down over the frame slots, then drop 3 slots.
1872 OPTBLD_INLINE void iopPopFrame(uint32_t nout) {
1873   assertx(vmStack().indC(nout + 0)->m_type == KindOfUninit);
1874   assertx(vmStack().indC(nout + 1)->m_type == KindOfUninit);
1875   assertx(vmStack().indC(nout + 2)->m_type == KindOfUninit);
// Iterate deepest-first so each value moves into a slot already vacated.
1876   for (int32_t i = nout - 1; i >= 0; --i) {
1877     *vmStack().indC(i + 3) = *vmStack().indC(i);
1879   vmStack().ndiscard(3);
// Pop the top Cell into local `to`. Ref locals and pseudo-main frames take
// the SetL+PopC path (tvSet) to preserve destructor ordering; otherwise the
// value is moved without refcount traffic.
1882 OPTBLD_INLINE void iopPopL(local_var to) {
1883   assertx(to.index < vmfp()->m_func->numLocals());
1884   Cell* fr = vmStack().topC();
1885   if (isRefType(to->m_type) || vmfp()->m_func->isPseudoMain()) {
1886     // Manipulate the ref-counts as if this was a SetL, PopC pair to preserve
1887     // destructor ordering.
1888     tvSet(*fr, *to);
1889     vmStack().popC();
1890   } else {
1891     cellMove(*fr, *to);
1892     vmStack().discard();
1896 OPTBLD_INLINE void iopDup() {
1897 vmStack().dup();
1900 OPTBLD_INLINE void iopCGetCUNop() {
1903 OPTBLD_INLINE void iopUGetCUNop() {
1906 OPTBLD_INLINE void iopNull() {
1907 vmStack().pushNull();
1910 OPTBLD_INLINE void iopNullUninit() {
1911 vmStack().pushNullUninit();
1914 OPTBLD_INLINE void iopTrue() {
1915 vmStack().pushBool(true);
1918 OPTBLD_INLINE void iopFalse() {
1919 vmStack().pushBool(false);
1922 OPTBLD_INLINE void iopFile() {
1923 auto s = vmfp()->m_func->unit()->filepath();
1924 vmStack().pushStaticString(s);
1927 OPTBLD_INLINE void iopDir() {
1928 auto s = vmfp()->m_func->unit()->dirpath();
1929 vmStack().pushStaticString(s);
1932 OPTBLD_INLINE void iopMethod() {
1933 auto s = vmfp()->m_func->fullName();
1934 vmStack().pushStaticString(s);
1937 OPTBLD_INLINE void iopFuncCred() {
1938 vmStack().pushObjectNoRc(
1939 FunctionCredential::newInstance(vmfp()->m_func));
// Replace the class on top of the stack with its name (persistent string);
// raises a fatal error if the top of the stack is not a class.
1942 OPTBLD_INLINE void iopClassName() {
1943   auto const cls = vmStack().topC();
1944   if (!isClassType(cls->m_type)) {
1945     raise_error("Attempting to get name of non-class");
1947   vmStack().replaceC<KindOfPersistentString>(
1948     cls->m_data.pclass->name()
1952 OPTBLD_INLINE void iopInt(int64_t imm) {
1953 vmStack().pushInt(imm);
1956 OPTBLD_INLINE void iopDouble(double imm) {
1957 vmStack().pushDouble(imm);
1960 OPTBLD_INLINE void iopString(const StringData* s) {
1961 vmStack().pushStaticString(s);
// Push a static PHP-array literal; under HackArrDVArrs it must not be a
// d/varray.
1964 OPTBLD_INLINE void iopArray(const ArrayData* a) {
1965   assertx(a->isPHPArray());
1966   assertx(!RuntimeOption::EvalHackArrDVArrs || a->isNotDVArray());
1967   vmStack().pushStaticArray(a);
1970 OPTBLD_INLINE void iopDict(const ArrayData* a) {
1971 assertx(a->isDict());
1972 vmStack().pushStaticDict(a);
1975 OPTBLD_INLINE void iopKeyset(const ArrayData* a) {
1976 assertx(a->isKeyset());
1977 vmStack().pushStaticKeyset(a);
1980 OPTBLD_INLINE void iopVec(const ArrayData* a) {
1981 assertx(a->isVecArray());
1982 vmStack().pushStaticVec(a);
// Push a new empty PHP array; zero capacity reuses the shared static empty
// array, otherwise a packed array with reserved capacity is allocated.
1985 OPTBLD_INLINE void iopNewArray(uint32_t capacity) {
1986   if (capacity == 0) {
1987     vmStack().pushArrayNoRc(staticEmptyArray());
1988   } else {
1989     vmStack().pushArrayNoRc(PackedArray::MakeReserve(capacity));
// Push a new empty mixed-layout PHP array, reserving `capacity` slots when
// non-zero; zero capacity reuses the shared static empty array.
1993 OPTBLD_INLINE void iopNewMixedArray(uint32_t capacity) {
1994   if (capacity == 0) {
1995     vmStack().pushArrayNoRc(staticEmptyArray());
1996   } else {
1997     vmStack().pushArrayNoRc(MixedArray::MakeReserveMixed(capacity));
// Push a new dict, reserving `capacity` slots when non-zero. With array
// provenance enabled, tag it with the current PC's location when one is
// available — tagFromProgramCounter is allowed to fail and return none.
2001 OPTBLD_INLINE void iopNewDictArray(uint32_t capacity) {
2002   auto const ad = capacity == 0
2003     ? staticEmptyDictArray()
2004     : MixedArray::MakeReserveDict(capacity);
2006   if (RuntimeOption::EvalArrayProvenance) {
// NOTE(review): with capacity == 0 this tags the shared static empty dict —
// confirm arrprov::setTag is safe on static arrays.
2007     if (auto const pctag = arrprov::tagFromProgramCounter()) {
2008       arrprov::setTag(ad, *pctag);
2011   vmStack().pushDictNoRc(ad);
// Push a new empty array whose layout mimics the array held in local `fr`;
// falls back to a packed reserve (minimum SmallSize) for non-array locals.
2014 OPTBLD_INLINE
2015 void iopNewLikeArrayL(local_var fr, uint32_t capacity) {
2016   ArrayData* arr;
2017   if (LIKELY(isArrayType(fr->m_type))) {
2018     arr = MixedArray::MakeReserveLike(fr->m_data.parr, capacity);
2019   } else {
2020     if (capacity == 0) capacity = PackedArray::SmallSize;
2021     arr = PackedArray::MakeReserve(capacity);
2023   vmStack().pushArrayNoRc(arr);
// Pop the top `n` Cells into a new packed array and push it.
2026 OPTBLD_INLINE void iopNewPackedArray(uint32_t n) {
2027   // This constructor moves values, no inc/decref is necessary.
2028   auto* a = PackedArray::MakePacked(n, vmStack().topC());
2029   vmStack().ndiscard(n);
2030   vmStack().pushArrayNoRc(a);
2033 namespace {
// Shared helper for the NewStruct* opcodes: resolve the literal-string keys
// in `ids`, then call factory `f` (a MixedArray::MakeStruct* function) with
// those keys and the top `n` stack values, discarding the consumed slots.
2035 template <typename F>
2036 ArrayData* newStructArrayImpl(imm_array<int32_t> ids, F f) {
2037   auto const n = ids.size;
2038   assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
2039   req::vector<const StringData*> names;
2040   names.reserve(n);
2041   auto unit = vmfp()->m_func->unit();
2042   for (size_t i = 0; i < n; ++i) {
2043     auto name = unit->lookupLitstrId(ids[i]);
2044     names.push_back(name);
2047   // This constructor moves values, no inc/decref is necessary.
2048   auto const a = f(n, names.data(), vmStack().topC())->asArrayData();
2049   vmStack().ndiscard(n);
2050   return a;
// Build a struct-like PHP array from literal keys + stack values.
OPTBLD_INLINE void iopNewStructArray(imm_array<int32_t> ids) {
  auto const a = newStructArrayImpl(ids, MixedArray::MakeStruct);
  vmStack().pushArrayNoRc(a);
}

// Same as NewStructArray but produces a darray (legacy dv-array mode only).
OPTBLD_INLINE void iopNewStructDArray(imm_array<int32_t> ids) {
  assertx(!RuntimeOption::EvalHackArrDVArrs);
  auto const a = newStructArrayImpl(ids, MixedArray::MakeStructDArray);
  vmStack().pushArrayNoRc(a);
}

// Same shape, but the result is a Hack dict.
OPTBLD_INLINE void iopNewStructDict(imm_array<int32_t> ids) {
  auto const a = newStructArrayImpl(ids, MixedArray::MakeStructDict);
  vmStack().pushDictNoRc(a);
}
// Pop the top n cells and push a new vec containing them.
OPTBLD_INLINE void iopNewVecArray(uint32_t n) {
  // This constructor moves values, no inc/decref is necessary.
  auto const a = PackedArray::MakeVec(n, vmStack().topC());
  if (RuntimeOption::EvalArrayProvenance) {
    // tagFromProgramCounter() may fail; tag only when it yields a value.
    if (auto const pctag = arrprov::tagFromProgramCounter()) {
      arrprov::setTag(a, *pctag);
    }
  }
  vmStack().ndiscard(n);
  vmStack().pushVecNoRc(a);
}
2083 OPTBLD_INLINE void iopNewKeysetArray(uint32_t n) {
2084 // This constructor moves values, no inc/decref is necessary.
2085 auto* a = SetArray::MakeSet(n, vmStack().topC());
2086 vmStack().ndiscard(n);
2087 vmStack().pushKeysetNoRc(a);
2090 OPTBLD_INLINE void iopNewVArray(uint32_t n) {
2091 assertx(!RuntimeOption::EvalHackArrDVArrs);
2092 // This constructor moves values, no inc/decref is necessary.
2093 auto a = PackedArray::MakeVArray(n, vmStack().topC());
2094 vmStack().ndiscard(n);
2095 vmStack().pushArrayNoRc(a);
2098 OPTBLD_INLINE void iopNewDArray(uint32_t capacity) {
2099 assertx(!RuntimeOption::EvalHackArrDVArrs);
2100 if (capacity == 0) {
2101 vmStack().pushArrayNoRc(staticEmptyDArray());
2102 } else {
2103 vmStack().pushArrayNoRc(MixedArray::MakeReserveDArray(capacity));
// TODO (T29595301): Use id instead of StringData
// Build a record of type `s` from literal field names + stack values.
OPTBLD_INLINE void iopNewRecord(const StringData* s, imm_array<int32_t> ids) {
  // `true` requests autoload of the record if it isn't defined yet.
  auto rec = Unit::getRecordDesc(s, true);
  if (!rec) {
    raise_error(Strings::UNKNOWN_RECORD, s->data());
  }
  auto const n = ids.size;
  assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
  req::vector<const StringData*> names;
  names.reserve(n);
  auto const unit = vmfp()->m_func->unit();
  for (size_t i = 0; i < n; ++i) {
    auto name = unit->lookupLitstrId(ids[i]);
    names.push_back(name);
  }
  // newRecord consumes the stack values; we only discard the slots.
  auto recdata =
    RecordData::newRecord(rec, names.size(), names.data(), vmStack().topC());
  vmStack().ndiscard(n);
  vmStack().pushRecordNoRc(recdata);
}
// Stack: [array-or-dict, key, value] -> [array-or-dict]; sets $3[$2] = $1.
OPTBLD_INLINE void iopAddElemC() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  Cell* c3 = vmStack().indC(2);
  if (!isArrayType(c3->m_type) && !isDictType(c3->m_type)) {
    raise_error("AddElemC: $3 must be an array or dict");
  }
  // Integer keys are set directly; everything else goes through the
  // Variant overload of set().
  if (c2->m_type == KindOfInt64) {
    cellAsVariant(*c3).asArrRef().set(c2->m_data.num, tvAsCVarRef(c1));
  } else {
    cellAsVariant(*c3).asArrRef().set(tvAsCVarRef(c2), tvAsCVarRef(c1));
  }
  vmStack().popC();
  vmStack().popC();
}
// Stack: [container, value] -> [container]; appends $1 to $2, which must be
// an array, vec, or keyset.
OPTBLD_INLINE void iopAddNewElemC() {
  Cell* c1 = vmStack().topC();
  Cell* c2 = vmStack().indC(1);
  if (isArrayType(c2->m_type)) {
    cellAsVariant(*c2).asArrRef().append(tvAsCVarRef(c1));
  } else if (isVecType(c2->m_type)) {
    // Append may COW/escalate; free the old array if a new one came back.
    auto in = c2->m_data.parr;
    auto out = PackedArray::AppendVec(in, *c1);
    if (in != out) decRefArr(in);
    c2->m_type = KindOfVec;
    c2->m_data.parr = out;
  } else if (isKeysetType(c2->m_type)) {
    auto in = c2->m_data.parr;
    auto out = SetArray::Append(in, *c1);
    if (in != out) decRefArr(in);
    c2->m_type = KindOfKeyset;
    c2->m_data.parr = out;
  } else {
    raise_error("AddNewElemC: $2 must be an array, vec, or keyset");
  }
  assertx(cellIsPlausible(*c2));
  vmStack().popC();
}
2168 OPTBLD_INLINE void iopNewCol(CollectionType cType) {
2169 assertx(cType != CollectionType::Pair);
2170 // Incref the collection object during construction.
2171 auto obj = collections::alloc(cType);
2172 vmStack().pushObjectNoRc(obj);
2175 OPTBLD_INLINE void iopNewPair() {
2176 Cell* c1 = vmStack().topC();
2177 Cell* c2 = vmStack().indC(1);
2178 // elements were pushed onto the stack in the order they should appear
2179 // in the pair, so the top of the stack should become the second element
2180 auto pair = collections::allocPair(*c2, *c1);
2181 // This constructor moves values, no inc/decref is necessary.
2182 vmStack().ndiscard(2);
2183 vmStack().pushObjectNoRc(pair);
// Convert the Hack array on top of the stack into a collection of the given
// type: (Imm)Vector from a vec, (Imm)Set/(Imm)Map from a dict.
OPTBLD_INLINE void iopColFromArray(CollectionType cType) {
  assertx(cType != CollectionType::Pair);
  auto const c1 = vmStack().topC();
  if (cType == CollectionType::Vector || cType == CollectionType::ImmVector) {
    if (UNLIKELY(!isVecType(c1->m_type))) {
      raise_error("ColFromArray: $1 must be a Vec when creating an "
                  "(Imm)Vector");
    }
  } else if (UNLIKELY(!isDictType(c1->m_type))) {
    raise_error("ColFromArray: $1 must be a Dict when creating an (Imm)Set "
                "or an (Imm)Map");
  }
  // This constructor reassociates the ArrayData with the collection, so no
  // inc/decref is needed for the array. The collection object itself is
  // increfed.
  auto obj = collections::alloc(cType, c1->m_data.parr);
  vmStack().discard();
  vmStack().pushObjectNoRc(obj);
}
2206 OPTBLD_INLINE void iopCnsE(const StringData* s) {
2207 auto const cns = Unit::loadCns(s);
2208 if (cns == nullptr) {
2209 raise_error("Undefined constant '%s'", s->data());
2211 auto const c1 = vmStack().allocC();
2212 cellDup(*cns, *c1);
2215 OPTBLD_INLINE void iopDefCns(const StringData* s) {
2216 bool result = Unit::defCns(s, vmStack().topTV());
2217 vmStack().replaceTV<KindOfBoolean>(result);
// Replace the class on top of the stack with one of its class constants.
OPTBLD_INLINE void iopClsCns(const StringData* clsCnsName) {
  auto const clsTV = vmStack().topC();
  if (!isClassType(clsTV->m_type)) {
    raise_error("Attempting class constant access on non-class");
  }

  auto const cls = clsTV->m_data.pclass;
  auto const clsCns = cls->clsCnsGet(clsCnsName);

  // clsCnsGet signals "not found" with KindOfUninit.
  if (clsCns.m_type == KindOfUninit) {
    raise_error("Couldn't find constant %s::%s",
                cls->name()->data(), clsCnsName->data());
  }

  cellDup(clsCns, *clsTV);
}
// Push the constant `clsCnsName` of the class identified by `classId`
// (resolved through the unit's named-entity table).
OPTBLD_INLINE void iopClsCnsD(const StringData* clsCnsName, Id classId) {
  const NamedEntityPair& classNamedEntity =
    vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
  auto const clsCns = g_context->lookupClsCns(classNamedEntity.second,
                                              classNamedEntity.first,
                                              clsCnsName);
  auto const c1 = vmStack().allocC();
  cellDup(clsCns, *c1);
}
// Stack: [lhs, rhs] -> [lhs . rhs]. The deeper cell (c2) is converted first,
// matching evaluation order of the concat operands.
OPTBLD_FLT_INLINE void iopConcat() {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const s2 = cellAsVariant(*c2).toString();
  auto const s1 = cellAsCVarRef(*c1).toString();
  cellAsVariant(*c2) = concat(s2, s1);
  assertx(c2->m_data.pstr->checkCount());
  vmStack().popC();
}
2256 OPTBLD_INLINE void iopConcatN(uint32_t n) {
2257 auto const c1 = vmStack().topC();
2258 auto const c2 = vmStack().indC(1);
2260 if (n == 2) {
2261 auto const s2 = cellAsVariant(*c2).toString();
2262 auto const s1 = cellAsCVarRef(*c1).toString();
2263 cellAsVariant(*c2) = concat(s2, s1);
2264 assertx(c2->m_data.pstr->checkCount());
2265 } else if (n == 3) {
2266 auto const c3 = vmStack().indC(2);
2267 auto const s3 = cellAsVariant(*c3).toString();
2268 auto const s2 = cellAsCVarRef(*c2).toString();
2269 auto const s1 = cellAsCVarRef(*c1).toString();
2270 cellAsVariant(*c3) = concat3(s3, s2, s1);
2271 assertx(c3->m_data.pstr->checkCount());
2272 } else {
2273 assertx(n == 4);
2274 auto const c3 = vmStack().indC(2);
2275 auto const c4 = vmStack().indC(3);
2276 auto const s4 = cellAsVariant(*c4).toString();
2277 auto const s3 = cellAsCVarRef(*c3).toString();
2278 auto const s2 = cellAsCVarRef(*c2).toString();
2279 auto const s1 = cellAsCVarRef(*c1).toString();
2280 cellAsVariant(*c4) = concat4(s4, s3, s2, s1);
2281 assertx(c4->m_data.pstr->checkCount());
2284 for (int i = 1; i < n; ++i) {
2285 vmStack().popC();
2289 OPTBLD_INLINE void iopNot() {
2290 Cell* c1 = vmStack().topC();
2291 cellAsVariant(*c1) = !cellAsVariant(*c1).toBoolean();
// Generic binary-operator driver: pops the top two cells, applies fn(lhs,
// rhs) with the deeper cell as lhs, and stores the resulting cell in place
// of the lhs.
template<class Fn>
OPTBLD_INLINE void implCellBinOp(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = result;
  vmStack().popC();
}

// Same driver, but the operator yields a bool cell.
template<class Fn>
OPTBLD_INLINE void implCellBinOpBool(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  bool const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfBoolean>(result);
  vmStack().popC();
}

// Same driver, but the operator yields an int64 cell (e.g. Cmp).
template<class Fn>
OPTBLD_INLINE void implCellBinOpInt64(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfInt64>(result);
  vmStack().popC();
}
// Arithmetic and bitwise binary opcodes. Each delegates to implCellBinOp
// with the matching cell* helper; the *O variants are the overflow-checked
// forms of add/sub/mul.
OPTBLD_INLINE void iopAdd() {
  implCellBinOp(cellAdd);
}

OPTBLD_INLINE void iopSub() {
  implCellBinOp(cellSub);
}

OPTBLD_INLINE void iopMul() {
  implCellBinOp(cellMul);
}

OPTBLD_INLINE void iopAddO() {
  implCellBinOp(cellAddO);
}

OPTBLD_INLINE void iopSubO() {
  implCellBinOp(cellSubO);
}

OPTBLD_INLINE void iopMulO() {
  implCellBinOp(cellMulO);
}

OPTBLD_INLINE void iopDiv() {
  implCellBinOp(cellDiv);
}

OPTBLD_INLINE void iopPow() {
  implCellBinOp(cellPow);
}

OPTBLD_INLINE void iopMod() {
  implCellBinOp(cellMod);
}

OPTBLD_INLINE void iopBitAnd() {
  implCellBinOp(cellBitAnd);
}

OPTBLD_INLINE void iopBitOr() {
  implCellBinOp(cellBitOr);
}

OPTBLD_INLINE void iopBitXor() {
  implCellBinOp(cellBitXor);
}
// Logical/comparison binary opcodes, all built on the implCellBinOp*
// drivers above.
OPTBLD_INLINE void iopXor() {
  implCellBinOpBool([&] (Cell c1, Cell c2) -> bool {
    return cellToBool(c1) ^ cellToBool(c2);
  });
}

OPTBLD_INLINE void iopSame() {
  implCellBinOpBool(cellSame);
}

OPTBLD_INLINE void iopNSame() {
  implCellBinOpBool([&] (Cell c1, Cell c2) {
    return !cellSame(c1, c2);
  });
}

OPTBLD_INLINE void iopEq() {
  implCellBinOpBool([&] (Cell c1, Cell c2) {
    return cellEqual(c1, c2);
  });
}

OPTBLD_INLINE void iopNeq() {
  implCellBinOpBool([&] (Cell c1, Cell c2) {
    return !cellEqual(c1, c2);
  });
}

OPTBLD_INLINE void iopLt() {
  implCellBinOpBool([&] (Cell c1, Cell c2) {
    return cellLess(c1, c2);
  });
}

OPTBLD_INLINE void iopLte() {
  implCellBinOpBool(cellLessOrEqual);
}

OPTBLD_INLINE void iopGt() {
  implCellBinOpBool([&] (Cell c1, Cell c2) {
    return cellGreater(c1, c2);
  });
}

OPTBLD_INLINE void iopGte() {
  implCellBinOpBool(cellGreaterOrEqual);
}

// Three-way comparison; pushes an int64 (<0, 0, >0).
OPTBLD_INLINE void iopCmp() {
  implCellBinOpInt64([&] (Cell c1, Cell c2) {
    return cellCompare(c1, c2);
  });
}

OPTBLD_INLINE void iopShl() {
  implCellBinOp(cellShl);
}

OPTBLD_INLINE void iopShr() {
  implCellBinOp(cellShr);
}

// Unary bitwise-not, applied in place to the top of the stack.
OPTBLD_INLINE void iopBitNot() {
  cellBitNot(*vmStack().topC());
}
// Cast opcodes: each converts the top-of-stack cell in place via the
// corresponding tvCastTo*InPlace helper.
OPTBLD_INLINE void iopCastBool() {
  Cell* c1 = vmStack().topC();
  tvCastToBooleanInPlace(c1);
}

OPTBLD_INLINE void iopCastInt() {
  Cell* c1 = vmStack().topC();
  tvCastToInt64InPlace(c1);
}

OPTBLD_INLINE void iopCastDouble() {
  Cell* c1 = vmStack().topC();
  tvCastToDoubleInPlace(c1);
}

OPTBLD_INLINE void iopCastString() {
  Cell* c1 = vmStack().topC();
  tvCastToStringInPlace(c1);
}

OPTBLD_INLINE void iopCastArray() {
  Cell* c1 = vmStack().topC();
  tvCastToArrayInPlace(c1);
}

OPTBLD_INLINE void iopCastObject() {
  Cell* c1 = vmStack().topC();
  tvCastToObjectInPlace(c1);
}

OPTBLD_INLINE void iopCastDict() {
  Cell* c1 = vmStack().topC();
  tvCastToDictInPlace(c1);
}

OPTBLD_INLINE void iopCastKeyset() {
  Cell* c1 = vmStack().topC();
  tvCastToKeysetInPlace(c1);
}

OPTBLD_INLINE void iopCastVec() {
  Cell* c1 = vmStack().topC();
  tvCastToVecInPlace(c1);
}

// varray/darray casts exist only in legacy dv-array mode.
OPTBLD_INLINE void iopCastVArray() {
  assertx(!RuntimeOption::EvalHackArrDVArrs);
  Cell* c1 = vmStack().topC();
  tvCastToVArrayInPlace(c1);
}

OPTBLD_INLINE void iopCastDArray() {
  assertx(!RuntimeOption::EvalHackArrDVArrs);
  Cell* c1 = vmStack().topC();
  tvCastToDArrayInPlace(c1);
}
// Reinterpret the top-of-stack double's bit pattern as an int64;
// non-doubles become int(0).
OPTBLD_INLINE void iopDblAsBits() {
  auto c = vmStack().topC();
  if (UNLIKELY(!isDoubleType(c->m_type))) {
    vmStack().replaceC<KindOfInt64>(0);
    return;
  }
  // Retype the cell only; m_data keeps the raw double bits.
  c->m_type = KindOfInt64;
}
// instanceof against a class named by string: resolve the name without
// autoloading and test c2 against the cached class, if any.
ALWAYS_INLINE
bool implInstanceOfHelper(const StringData* str1, Cell* c2) {
  const NamedEntity* rhs = NamedEntity::get(str1, false);
  // Because of other codepaths, an un-normalized name might enter the
  // table without a Class* so we need to check if it's there.
  if (LIKELY(rhs && rhs->getCachedClass() != nullptr)) {
    return cellInstanceOf(c2, rhs);
  }
  return false;
}
// Stack: [value, class-spec] -> [bool]; evaluates `$2 instanceof $1` where
// the class may be named by a string, an object, or a class pointer.
OPTBLD_INLINE void iopInstanceOf() {
  Cell* c1 = vmStack().topC();   // c2 instanceof c1
  Cell* c2 = vmStack().indC(1);
  bool r = false;
  if (isStringType(c1->m_type)) {
    r = implInstanceOfHelper(c1->m_data.pstr, c2);
  } else if (c1->m_type == KindOfObject) {
    // Against an object, test the object's class; non-object lhs is false.
    if (c2->m_type == KindOfObject) {
      ObjectData* lhs = c2->m_data.pobj;
      ObjectData* rhs = c1->m_data.pobj;
      r = lhs->instanceof(rhs->getVMClass());
    }
  } else if (isClassType(c1->m_type)) {
    // TODO (T29639296) Exploit class pointer further
    r = implInstanceOfHelper(c1->m_data.pclass->name(), c2);
  } else {
    raise_error("Class name must be a valid object or a string");
  }
  vmStack().popC();
  vmStack().replaceC<KindOfBoolean>(r);
}
2537 OPTBLD_INLINE void iopInstanceOfD(Id id) {
2538 const NamedEntity* ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
2539 Cell* c1 = vmStack().topC();
2540 bool r = cellInstanceOf(c1, ne);
2541 vmStack().replaceC<KindOfBoolean>(r);
// Test whether the top-of-stack value is an instance of the late-bound
// class of the current frame (errors outside a class or inside a trait).
OPTBLD_INLINE void iopIsLateBoundCls() {
  auto const cls = frameStaticClass(vmfp());
  if (!cls) {
    raise_error(HPHP::Strings::THIS_OUTSIDE_CLASS);
  }
  if (isTrait(cls)) {
    raise_error("\"is\" and \"as\" operators cannot be used with a trait");
  }
  auto const c1 = vmStack().topC();
  bool r = cellInstanceOf(c1, cls);
  vmStack().replaceC<KindOfBoolean>(r);
}
2557 namespace {
// Resolve the top n type structures on the stack. When the first structure
// may reference non-static context (this::, self::, etc.), supply the
// declaring and called classes from the current frame so resolution can
// bind them.
ArrayData* resolveAndVerifyTypeStructureHelper(
  uint32_t n, const TypedValue* values, bool suppress, bool isOrAsOp) {
  Class* declaringCls = nullptr;
  Class* calledCls = nullptr;
  auto const v = *values;
  isValidTSType(v, true);
  if (typeStructureCouldBeNonStatic(v.m_data.parr)) {
    auto const frame = vmfp();
    if (frame && frame->func()) {
      declaringCls = frame->func()->cls();
      if (declaringCls) {
        calledCls = frame->hasClass()
          ? frame->getClass()
          : frame->getThis()->getVMClass();
      }
    }
  }
  return jit::resolveTypeStructHelper(n, values, declaringCls,
                                      calledCls, suppress, isOrAsOp);
}
// For is/as expressions: either resolve the type structure on top of the
// stack, or (DontResolve) validate it and return it unresolved.
ALWAYS_INLINE ArrayData* maybeResolveAndErrorOnTypeStructure(
  TypeStructResolveOp op,
  bool suppress
) {
  auto const a = vmStack().topC();
  isValidTSType(*a, true);

  if (op == TypeStructResolveOp::Resolve) {
    return resolveAndVerifyTypeStructureHelper(1, vmStack().topC(),
                                               suppress, true);
  }
  // Unresolved path: reject type structures that are invalid in is/as.
  errorOnIsAsExpressionInvalidTypes(ArrNR(a->m_data.parr), false);
  return a->m_data.parr;
}
2595 } // namespace
// Stack: [value, type-structure] -> [bool]; tests the value against the
// (possibly resolved) type structure.
OPTBLD_INLINE void iopIsTypeStructC(TypeStructResolveOp op) {
  auto const c = vmStack().indC(1);
  auto const ts = maybeResolveAndErrorOnTypeStructure(op, true);
  auto b = checkTypeStructureMatchesCell(ArrNR(ts), *c);
  vmStack().popC(); // pop ts
  vmStack().replaceC<KindOfBoolean>(b);
}
// Emitted when an `as` check has already failed: recompute the mismatch
// details and throw. Reaching the end without throwing is a bytecode bug.
OPTBLD_INLINE void iopThrowAsTypeStructException() {
  auto const c = vmStack().indC(1);
  auto const ts =
    maybeResolveAndErrorOnTypeStructure(TypeStructResolveOp::Resolve, false);
  std::string givenType, expectedType, errorKey;
  if (!checkTypeStructureMatchesCell(ArrNR(ts), *c, givenType, expectedType,
                                     errorKey)) {
    vmStack().popC(); // pop ts
    throwTypeStructureDoesNotMatchCellException(
      givenType, expectedType, errorKey);
  }
  raise_error("Invalid bytecode sequence: Instruction must throw");
}
// Combine the top n type structures into one resolved structure and push
// it (as a dict or array depending on the dv-array mode).
OPTBLD_INLINE void iopCombineAndResolveTypeStruct(uint32_t n) {
  assertx(n != 0);
  auto const resolved =
    resolveAndVerifyTypeStructureHelper(n, vmStack().topC(), false, false);
  vmStack().popC(); // pop the first TS
  vmStack().ndiscard(n-1);
  if (RuntimeOption::EvalHackArrDVArrs) {
    vmStack().pushDict(resolved);
  } else {
    vmStack().pushArray(resolved);
  }
}
// Record the reified-generics type-structure list on the stack and replace
// it with the canonical (static) list.
OPTBLD_INLINE void iopRecordReifiedGeneric() {
  auto const tsList = vmStack().topC();
  if (RuntimeOption::EvalHackArrDVArrs ?
      !tvIsVec(tsList) : !tvIsArray(tsList)) {
    raise_error("Invalid type-structure list in RecordReifiedGeneric");
  }
  // recordReifiedGenericsAndGetTSList decrefs the tsList
  auto const result =
    jit::recordReifiedGenericsAndGetTSList(tsList->m_data.parr);
  vmStack().discard();
  if (RuntimeOption::EvalHackArrDVArrs) {
    vmStack().pushStaticVec(result);
  } else {
    vmStack().pushStaticArray(result);
  }
}
// Record reified generics and push `name` mangled with the generics list.
OPTBLD_INLINE void iopReifiedName(const StringData* name) {
  auto const tsList = vmStack().topC();
  if (RuntimeOption::EvalHackArrDVArrs ?
      !tvIsVec(tsList) : !tvIsArray(tsList)) {
    raise_error("Invalid type-structure list in ReifiedName");
  }
  // recordReifiedGenericsAndGetName decrefs the tsList
  auto const result = jit::recordReifiedGenericsAndGetName(tsList->m_data.parr);
  auto const mangledName = mangleReifiedName(name, result);
  vmStack().discard();
  vmStack().pushStaticString(mangledName);
}
// Verify the reified-generics list on the stack against the current class's
// expectations; errors on mismatch, then pops the list.
OPTBLD_INLINE void iopCheckReifiedGenericMismatch() {
  Class* cls = arGetContextClass(vmfp());
  if (!cls) raise_error("No class scope is active");
  auto const c = vmStack().topC();
  if (RuntimeOption::EvalHackArrDVArrs ?
      !tvIsVec(c) : !tvIsArray(c)) {
    raise_error("Invalid type-structure list in CheckReifiedGenericMismatch");
  }
  checkClassReifiedGenericMismatch(cls, c->m_data.parr);
  vmStack().popC();
}
2674 OPTBLD_INLINE void iopPrint() {
2675 Cell* c1 = vmStack().topC();
2676 g_context->write(cellAsVariant(*c1).toString());
2677 vmStack().replaceC<KindOfInt64>(1);
// Replace the object on top of the stack with a clone of it.
OPTBLD_INLINE void iopClone() {
  TypedValue* tv = vmStack().topTV();
  if (tv->m_type != KindOfObject) {
    raise_error("clone called on non-object");
  }
  auto newobj = tv->m_data.pobj->clone();
  vmStack().popTV();
  // clone() returned an already-increfed object.
  vmStack().pushObjectNoRc(newobj);
}
// Implements exit(): an int argument becomes the exit code, anything else
// is written to output; then unwind via ExitException.
OPTBLD_INLINE void iopExit() {
  int exitCode = 0;
  Cell* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64) {
    exitCode = c1->m_data.num;
  } else {
    g_context->write(cellAsVariant(*c1).toString());
  }
  vmStack().popC();
  // Leave a null on the stack as the notional result before unwinding.
  vmStack().pushNull();
  throw ExitException(exitCode);
}
// Raise a fatal error with the message on top of the stack. The FatalOp
// selects whether the first frame is omitted from the backtrace.
OPTBLD_INLINE void iopFatal(FatalOp kind_char) {
  TypedValue* top = vmStack().topTV();
  std::string msg;
  if (isStringType(top->m_type)) {
    msg = top->m_data.pstr->data();
  } else {
    msg = "Fatal error message not a string";
  }
  vmStack().popTV();

  switch (kind_char) {
  case FatalOp::RuntimeOmitFrame:
    raise_error_without_first_frame(msg);
    break;
  case FatalOp::Runtime:
  case FatalOp::Parse:
    raise_error(msg);
    break;
  }
}
// Surprise-flag check for jumps. Only backward (or self) jumps check, so
// forward-only control flow pays nothing; loops are still interruptible.
OPTBLD_INLINE void jmpSurpriseCheck(Offset offset) {
  if (offset <= 0 && UNLIKELY(checkSurpriseFlags())) {
    auto const flags = handle_request_surprise();

    // Memory Threshold callback should also be fired here
    if (flags & MemThresholdFlag) {
      EventHook::DoMemoryThresholdCallback();
    }
  }
}
// Unconditional jump, with a surprise check on backward edges.
OPTBLD_INLINE void iopJmp(PC& pc, PC targetpc) {
  jmpSurpriseCheck(targetpc - pc);
  pc = targetpc;
}

// "No surprise" jump: identical but skips the surprise-flag check.
OPTBLD_INLINE void iopJmpNS(PC& pc, PC targetpc) {
  pc = targetpc;
}
// Shared implementation of JmpZ/JmpNZ: pop the condition and jump when it
// is (non-)zero. Int/bool cells take the fast path without conversion.
template<Op op>
OPTBLD_INLINE void jmpOpImpl(PC& pc, PC targetpc) {
  static_assert(op == OpJmpZ || op == OpJmpNZ,
                "jmpOpImpl should only be used by JmpZ and JmpNZ");
  jmpSurpriseCheck(targetpc - pc);

  Cell* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
    int64_t n = c1->m_data.num;
    // popX: no decref needed for these unrefcounted types.
    vmStack().popX();
    if (op == OpJmpZ ? n == 0 : n != 0) pc = targetpc;
  } else {
    auto const cond = cellAsCVarRef(*c1).toBoolean();
    vmStack().popC();
    if (op == OpJmpZ ? !cond : cond) pc = targetpc;
  }
}
// Jump if the popped condition is falsey.
OPTBLD_INLINE void iopJmpZ(PC& pc, PC targetpc) {
  jmpOpImpl<OpJmpZ>(pc, targetpc);
}

// Jump if the popped condition is truthy.
OPTBLD_INLINE void iopJmpNZ(PC& pc, PC targetpc) {
  jmpOpImpl<OpJmpNZ>(pc, targetpc);
}
// Stack: [false-val, true-val, cond] -> [chosen-val]. Pops the condition,
// then keeps either the true value (top) or the false value (deeper).
OPTBLD_INLINE void iopSelect() {
  auto const cond = [&]{
    auto c = vmStack().topC();
    if (c->m_type == KindOfInt64 || c->m_type == KindOfBoolean) {
      auto const val = (bool)c->m_data.num;
      vmStack().popX();
      return val;
    } else {
      auto const val = cellAsCVarRef(*c).toBoolean();
      vmStack().popC();
      return val;
    }
  }();

  if (cond) {
    // Keep the true value: move it down over the false value.
    auto const t = *vmStack().topC();
    vmStack().discard();
    vmStack().replaceC(t);
  } else {
    // Keep the false value: just drop the true value.
    vmStack().popC();
  }
}
// Break out of one or more nested iterator loops: free every iterator
// listed in the iter table, then jump to the target.
OPTBLD_INLINE
void iopIterBreak(PC& pc, PC targetpc, const IterTable& iterTab) {
  for (auto const& ent : iterTab) {
    auto iter = frame_iter(vmfp(), ent.id);
    switch (ent.kind) {
      case KindOfIter:  iter->free(); break;
      case KindOfLIter: iter->free(); break;
    }
  }
  pc = targetpc;
}
// How a Switch operand maps onto the jump table.
enum class SwitchMatch {
  NORMAL,  // value was converted to an int: match normally
  NONZERO, // can't be converted to an int: match first nonzero case
  DEFAULT, // can't be converted to an int: match default case
};

// If `d` is exactly representable as an int64, write it to `out` and match
// normally; otherwise take the default case.
static SwitchMatch doubleCheck(double d, int64_t& out) {
  if (int64_t(d) == d) {
    out = d;
    return SwitchMatch::NORMAL;
  }
  return SwitchMatch::DEFAULT;
}
// Integer switch. Unbounded switches (resume labels) index the table
// directly; bounded switches coerce the operand to an int using PHP
// semantics, then dispatch, with the last two table entries reserved for
// the "first nonzero" and "default" targets.
OPTBLD_INLINE
void iopSwitch(PC origpc, PC& pc, SwitchKind kind, int64_t base,
               imm_array<Offset> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 0);
  TypedValue* val = vmStack().topTV();
  if (kind == SwitchKind::Unbounded) {
    assertx(val->m_type == KindOfInt64);
    // Continuation switch: no bounds checking needed
    int64_t label = val->m_data.num;
    vmStack().popX();
    assertx(label >= 0 && label < veclen);
    pc = origpc + jmptab[label];
  } else {
    // Generic integer switch
    int64_t intval;
    SwitchMatch match = SwitchMatch::NORMAL;

    // The lambda classifies the operand, leaving `intval`/`match` set and
    // releasing any refcounted operand it consumed.
    [&] {
      switch (val->m_type) {
        case KindOfUninit:
        case KindOfNull:
          intval = 0;
          return;

        case KindOfBoolean:
          // bool(true) is equal to any non-zero int, bool(false) == 0
          if (val->m_data.num) {
            match = SwitchMatch::NONZERO;
          } else {
            intval = 0;
          }
          return;

        case KindOfInt64:
          intval = val->m_data.num;
          return;

        case KindOfDouble:
          match = doubleCheck(val->m_data.dbl, intval);
          return;

        case KindOfFunc:
        case KindOfClass:
        case KindOfPersistentString:
        case KindOfString: {
          double dval = 0.0;
          // Funcs/classes are compared by their (string) name.
          auto const str =
            isFuncType(val->m_type) ? funcToStringHelper(val->m_data.pfunc) :
            isClassType(val->m_type) ? classToStringHelper(val->m_data.pclass) :
            val->m_data.pstr;
          DataType t = str->isNumericWithVal(intval, dval, 1);
          switch (t) {
            case KindOfNull:
              // Non-numeric string: compares equal to 0.
              intval = 0;
              break;
            case KindOfInt64:
              // do nothing
              break;
            case KindOfDouble:
              match = doubleCheck(dval, intval);
              break;
            case KindOfUninit:
            case KindOfBoolean:
            case KindOfPersistentString:
            case KindOfString:
            case KindOfPersistentVec:
            case KindOfVec:
            case KindOfPersistentDict:
            case KindOfDict:
            case KindOfPersistentKeyset:
            case KindOfKeyset:
            case KindOfPersistentShape:
            case KindOfShape:
            case KindOfPersistentArray:
            case KindOfArray:
            case KindOfObject:
            case KindOfResource:
            case KindOfRef:
            case KindOfFunc:
            case KindOfClass:
            case KindOfClsMeth:
            case KindOfRecord:
              not_reached();
          }
          if (val->m_type == KindOfString) tvDecRefStr(val);
          return;
        }

        // Array-like operands always take the default case; the counted
        // variants are decrefed first and deliberately fall through to
        // their persistent counterparts.
        case KindOfVec:
          tvDecRefArr(val);
        case KindOfPersistentVec:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfDict:
          tvDecRefArr(val);
        case KindOfPersistentDict:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfKeyset:
          tvDecRefArr(val);
        case KindOfPersistentKeyset:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfShape:
          tvDecRefArr(val);
        case KindOfPersistentShape:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfArray:
          tvDecRefArr(val);
        case KindOfPersistentArray:
          match = SwitchMatch::DEFAULT;
          return;

        case KindOfClsMeth:
          tvDecRefClsMeth(val);
          match = SwitchMatch::DEFAULT;
          // NOTE(review): this `break` exits the switch and reaches the
          // not_reached() below, unlike the `return`s above — confirm
          // ClsMeth operands cannot actually reach a bounded Switch.
          break;

        case KindOfObject:
          intval = val->m_data.pobj->toInt64();
          tvDecRefObj(val);
          return;

        case KindOfResource:
          intval = val->m_data.pres->data()->o_toInt64();
          tvDecRefRes(val);
          return;

        case KindOfRecord: // TODO (T41029094)
          raise_error(Strings::RECORD_NOT_SUPPORTED);

        case KindOfRef:
          break;
      }
      not_reached();
    }();
    vmStack().discard();

    // Out-of-range (or non-normal) operands dispatch to one of the two
    // trailing table entries: [veclen-2] = first-nonzero, [veclen-1] = default.
    if (match != SwitchMatch::NORMAL ||
        intval < base || intval >= (base + veclen - 2)) {
      switch (match) {
        case SwitchMatch::NORMAL:
        case SwitchMatch::DEFAULT:
          pc = origpc + jmptab[veclen - 1];
          break;

        case SwitchMatch::NONZERO:
          pc = origpc + jmptab[veclen - 2];
          break;
      }
    } else {
      pc = origpc + jmptab[intval - base];
    }
  }
}
// String switch: compare the popped value against each case's literal
// string (loose equality); the last table entry is the default target.
OPTBLD_INLINE
void iopSSwitch(PC origpc, PC& pc, imm_array<StrVecItem> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 1);
  unsigned cases = veclen - 1; // the last vector item is the default case
  Cell* val = tvToCell(vmStack().topTV());
  Unit* u = vmfp()->m_func->unit();
  unsigned i;
  for (i = 0; i < cases; ++i) {
    auto item = jmptab[i];
    const StringData* str = u->lookupLitstrId(item.str);
    if (cellEqual(*val, str)) {
      pc = origpc + item.dest;
      vmStack().popC();
      return;
    }
  }
  // default case
  pc = origpc + jmptab[veclen - 1].dest;
  vmStack().popC();
}
3004 * jitReturnPre and jitReturnPost are used by RetC/V, CreateCont, NativeImpl,
3005 * Yield, and YieldK to perform a few tasks related to interpreting out of a
3006 * frame:
3008 * - If the current frame was entered in the TC and the jit is now off, we
3009 * throw a VMSwitchMode at the beginning of the bytecode to execute the
3010 * call's catch block (if present) before performing the return.
3011 * - If the current frame was entered in the TC and the jit is still on,
3012 * we wait until the end of the bytecode and throw a VMResumeTC, to return to
3013 * our translated caller rather than interpreting back into it.
3014 * - If the current frame was entered by the interpreter but was active when
3015 * the jit called MCGenerator::handleResume() (meaning it's the saved value
3016 * of %rbp in handleResume()'s stack frame), throw a VMResumeTC to reenter
3017 * handleResume(). This is necessary to update the value of %rbp in the TC
3018 * frame, so the unwinder doesn't read from a dead VM frame if something
3019 * throws from the interpreter later on.
3021 namespace {
// Caller/return info captured by jitReturnPre before a frame is torn down,
// consumed by jitReturnPost after the return completes.
struct JitReturn {
  uint64_t savedRip;  // caller's saved return address (0 if interpreted)
  ActRec* fp;         // the frame being returned from
  ActRec* sfp;        // its caller frame, if any
  uint32_t callOff;   // bytecode offset of the call in the caller
};
// Capture the info needed to return out of `fp`, and decide up front
// whether we may return via the interpreter (savedRip cleared) or must
// bail back to the TC / switch modes. See the comment block above.
OPTBLD_INLINE JitReturn jitReturnPre(ActRec* fp) {
  auto savedRip = fp->m_savedRip;
  if (isReturnHelper(reinterpret_cast<void*>(savedRip))) {
    // This frame wasn't called from the TC, so it's ok to return using the
    // interpreter. callToExit is special: it's a return helper but we don't
    // treat it like one in here in order to simplify some things higher up in
    // the pipeline.
    if (reinterpret_cast<TCA>(savedRip) != jit::tc::ustubs().callToExit) {
      savedRip = 0;
    }
  } else if (!RID().getJit()) {
    // We entered this frame in the TC but the jit is now disabled, probably
    // because a debugger is attached. If we leave this frame in the
    // interpreter, we might be skipping a catch block that our caller expects
    // to be run. Switch to the interpreter before even beginning the
    // instruction.
    throw VMSwitchMode();
  }

  return {savedRip, fp, fp->sfp(), fp->m_callOff};
}
// Finish a return that began with jitReturnPre. Returns the TC address to
// resume at, or nullptr to keep interpreting.
OPTBLD_INLINE TCA jitReturnPost(JitReturn retInfo) {
  if (retInfo.savedRip) {
    if (isDebuggerReturnHelper(reinterpret_cast<void*>(retInfo.savedRip))) {
      // Our return address was smashed by the debugger. Do the work of the
      // debuggerRetHelper by setting some unwinder RDS info and resuming at
      // the approprate catch trace.
      assertx(jit::g_unwind_rds.isInit());
      jit::g_unwind_rds->debuggerReturnSP = vmsp();
      jit::g_unwind_rds->debuggerCallOff = retInfo.callOff;
      return jit::unstashDebuggerCatch(retInfo.fp);
    }

    // This frame was called by translated code so we can't interpret out of
    // it. Resume in the TC right after our caller. This situation most
    // commonly happens when we interpOne a RetC due to having a VarEnv or some
    // other weird case.
    return TCA(retInfo.savedRip);
  }

  if (!retInfo.sfp) {
    // If we don't have an sfp, we're returning from the first frame in this VM
    // nesting level. The vmJitCalledFrame() check below is only important if
    // we might throw before returning to the TC, which is guaranteed to not
    // happen in this situation.
    assertx(vmfp() == nullptr);
    return nullptr;
  }

  // Consider a situation with a PHP function f() that calls another function
  // g(). If the call is interpreted, then we spend some time in the TC inside
  // g(), then eventually end in dispatchBB() (called by
  // MCGenerator::handleResume()) for g()'s RetC, the logic here kicks in.
  //
  // g()'s VM frame was in %rbp when the TC called handleResume(), so it's
  // saved somewhere in handleResume()'s stack frame. If we return out of that
  // frame and keep going in the interpreter, that saved %rbp will be pointing
  // to a garbage VM frame. This is a problem if something needs to throw an
  // exception up through handleResume() and the TC frames above it, since the
  // C++ unwinder will attempt to treat parts of the popped VM frame as
  // pointers and segfault.
  //
  // To avoid running with this dangling saved %rbp a few frames up, we
  // immediately throw an exception that is "caught" by the TC frame that
  // called handleResume(). We resume execution in the TC which reloads the new
  // vmfp() into %rbp, then handleResume() is called again, this time with a
  // live VM frame in %rbp.
  if (vmJitCalledFrame() == retInfo.fp) {
    FTRACE(1, "Returning from frame {}; resuming", vmJitCalledFrame());
    return jit::tc::ustubs().resumeHelper;
  }

  return nullptr;
}
// Restore the caller's frame pointer and PC (just past the call), or leave
// pc null when there is no caller frame in this nesting level.
OPTBLD_INLINE void returnToCaller(PC& pc, ActRec* sfp, Offset callOff) {
  vmfp() = sfp;
  pc = LIKELY(sfp != nullptr)
    ? skipCall(sfp->func()->getEntry() + callOff)
    : nullptr;
}
3115 template <bool suspended>
3116 OPTBLD_INLINE TCA ret(PC& pc) {
3117 assertx(!suspended || vmfp()->func()->isAsyncFunction());
3118 assertx(!suspended || !vmfp()->resumed());
3120 auto const jitReturn = jitReturnPre(vmfp());
3122 // Get the return value.
3123 TypedValue retval = *vmStack().topTV();
3124 vmStack().discard();
3126 assertx(
3127 !suspended || (tvIsObject(retval) && retval.m_data.pobj->isWaitHandle())
3130 // Free $this and local variables. Calls FunctionReturn hook. The return
3131 // value must be removed from the stack, or the unwinder would try to free it
3132 // if the hook throws---but the event hook routine decrefs the return value
3133 // in that case if necessary.
3134 frame_free_locals_inl(vmfp(), vmfp()->func()->numLocals(), &retval);
3136 // Grab caller info from ActRec.
3137 ActRec* sfp = vmfp()->sfp();
3138 Offset callOff = vmfp()->m_callOff;
3140 if (LIKELY(!vmfp()->resumed())) {
3141 // If in an eagerly executed async function, wrap the return value into
3142 // succeeded StaticWaitHandle. Async eager return requests are currently
3143 // not respected, as we don't have a way to obtain the async eager offset.
3144 if (UNLIKELY(vmfp()->func()->isAsyncFunction()) && !suspended) {
3145 auto const& retvalCell = *tvAssertCell(&retval);
3146 // Heads up that we're assuming CreateSucceeded can't throw, or we won't
3147 // decref the return value. (It can't right now.)
3148 auto const waitHandle = c_StaticWaitHandle::CreateSucceeded(retvalCell);
3149 cellCopy(make_tv<KindOfObject>(waitHandle), retval);
3152 // Free ActRec and store the return value.
3153 vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
3154 vmStack().ret();
3155 *vmStack().topTV() = retval;
3156 assertx(vmStack().topTV() == vmfp()->retSlot());
3157 // In case async eager return was requested by the caller, pretend that
3158 // we did not finish eagerly as we already boxed the value.
3159 vmStack().topTV()->m_aux.u_asyncNonEagerReturnFlag = -1;
3160 } else if (vmfp()->func()->isAsyncFunction()) {
3161 // Mark the async function as succeeded and store the return value.
3162 assertx(!sfp);
3163 auto wh = frame_afwh(vmfp());
3164 wh->ret(retval);
3165 decRefObj(wh);
3166 } else if (vmfp()->func()->isAsyncGenerator()) {
3167 // Mark the async generator as finished.
3168 assertx(isNullType(retval.m_type));
3169 auto const gen = frame_async_generator(vmfp());
3170 auto const eagerResult = gen->ret();
3171 if (eagerResult) {
3172 // Eager execution => return StaticWaitHandle.
3173 assertx(sfp);
3174 vmStack().pushObjectNoRc(eagerResult);
3175 } else {
3176 // Resumed execution => return control to the scheduler.
3177 assertx(!sfp);
3179 } else if (vmfp()->func()->isNonAsyncGenerator()) {
3180 // Mark the generator as finished and store the return value.
3181 frame_generator(vmfp())->ret(retval);
3183 // Push return value of next()/send()/raise().
3184 vmStack().pushNull();
3185 } else {
3186 not_reached();
3189 // Return control to the caller.
3190 returnToCaller(pc, sfp, callOff);
3192 return jitReturnPost(jitReturn);
3195 OPTBLD_INLINE TCA iopRetC(PC& pc) {
3196 return ret<false>(pc);
3199 OPTBLD_INLINE TCA iopRetCSuspended(PC& pc) {
3200 assertx(vmfp()->func()->isAsyncFunction());
3201 assertx(!vmfp()->resumed());
3202 return ret<true>(pc);
3205 OPTBLD_INLINE TCA iopRetM(PC& pc, uint32_t numRet) {
3206 auto const jitReturn = jitReturnPre(vmfp());
3208 req::vector<TypedValue> retvals;
3209 retvals.reserve(numRet);
3211 for (int i = numRet - 1; i >= 0; i--) {
3212 retvals.push_back(*vmStack().indC(i));
3215 vmStack().ndiscard(numRet);
3217 // Free $this and local variables. Calls FunctionReturn hook. The return
3218 // value must be removed from the stack, or the unwinder would try to free it
3219 // if the hook throws---but the event hook routine decrefs the return value
3220 // in that case if necessary.
3221 frame_free_locals_inl(vmfp(), vmfp()->func()->numLocals(), &retvals[0]);
3223 assertx(!vmfp()->func()->isGenerator() && !vmfp()->func()->isAsync());
3225 // Grab caller info from ActRec.
3226 ActRec* sfp = vmfp()->sfp();
3227 Offset callOff = vmfp()->m_callOff;
3229 // Free ActRec and store the return value.
3230 vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
3231 vmStack().ret();
3233 // Discard scratch space for return values allocated for multi return FCall
3234 vmStack().ndiscard(numRet - 1);
3235 *vmStack().topTV() = retvals[1];
3237 for (int i = 2; i < numRet; i++) {
3238 *vmStack().allocTV() = retvals[i];
3241 // Store the actual return value at the top of the stack
3242 *vmStack().allocTV() = retvals[0];
3244 // Return control to the caller.
3245 returnToCaller(pc, sfp, callOff);
3247 return jitReturnPost(jitReturn);
3250 OPTBLD_INLINE void iopThrow(PC&) {
3251 Cell* c1 = vmStack().topC();
3252 if (c1->m_type != KindOfObject ||
3253 !c1->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
3254 raise_error("Exceptions must implement the Throwable interface.");
3256 auto obj = Object::attach(c1->m_data.pobj);
3257 vmStack().discard();
3258 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
3259 throw req::root<Object>(std::move(obj));
3262 OPTBLD_INLINE void iopClassGetC() {
3263 auto const cell = vmStack().topC();
3264 if (isStringType(cell->m_type)) {
3265 raise_str_to_class_notice(cell->m_data.pstr);
3267 auto const cls = lookupClsRef(cell);
3268 vmStack().popC();
3269 vmStack().pushClass(cls);
3272 OPTBLD_INLINE void iopClassGetTS() {
3273 auto const cell = vmStack().topC();
3274 if (!tvIsDictOrDArray(cell)) {
3275 raise_error("Reified type must be a type structure");
3277 auto const ts = cell->m_data.parr;
3278 auto const classname_field = ts->rval(s_classname.get());
3279 if (!classname_field.is_set()) {
3280 raise_error("You cannot create a new instance of this type as "
3281 "it is not a class");
3283 assertx(isStringType(classname_field.type()));
3284 auto const name = classname_field.val().pstr;
3285 auto const generics_field = ts->rval(s_generic_types.get());
3286 auto mangledName = name;
3287 ArrayData* reified_types = nullptr;
3288 if (generics_field.is_set()) {
3289 reified_types = generics_field.val().parr;
3290 auto const mangledTypeName =
3291 makeStaticString(mangleReifiedGenericsName(reified_types));
3292 reified_types->incRefCount();
3293 reified_types = addToReifiedGenericsTable(mangledTypeName, reified_types);
3294 mangledName = mangleReifiedName(name, mangledTypeName);
3296 auto tv = make_tv<KindOfString>(mangledName);
3297 auto const cls = lookupClsRef(&tv);
3299 vmStack().popC();
3300 vmStack().pushClass(cls);
3301 if (reified_types) {
3302 if (RuntimeOption::EvalHackArrDVArrs) {
3303 vmStack().pushStaticVec(reified_types);
3304 } else {
3305 vmStack().pushStaticArray(reified_types);
3307 } else {
3308 vmStack().pushNull();
3312 static void raise_undefined_local(ActRec* fp, Id pind) {
3313 assertx(pind < fp->m_func->numNamedLocals());
3314 raise_notice(Strings::UNDEFINED_VARIABLE,
3315 fp->m_func->localVarName(pind)->data());
3318 static inline void cgetl_inner_body(TypedValue* fr, TypedValue* to) {
3319 assertx(fr->m_type != KindOfUninit);
3320 cellDup(*tvToCell(fr), *to);
3323 OPTBLD_INLINE void cgetl_body(ActRec* fp,
3324 TypedValue* fr,
3325 TypedValue* to,
3326 Id pind,
3327 bool warn) {
3328 if (fr->m_type == KindOfUninit) {
3329 // `to' is uninitialized here, so we need to tvWriteNull before
3330 // possibly causing stack unwinding.
3331 tvWriteNull(*to);
3332 if (warn) raise_undefined_local(fp, pind);
3333 } else {
3334 cgetl_inner_body(fr, to);
3338 OPTBLD_FLT_INLINE void iopCGetL(local_var fr) {
3339 Cell* to = vmStack().allocC();
3340 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3343 OPTBLD_INLINE void iopCGetQuietL(local_var fr) {
3344 Cell* to = vmStack().allocC();
3345 cgetl_body(vmfp(), fr.ptr, to, fr.index, false);
3348 OPTBLD_INLINE void iopCUGetL(local_var fr) {
3349 auto to = vmStack().allocTV();
3350 tvDup(*tvToCell(fr.ptr), *to);
3353 OPTBLD_INLINE void iopCGetL2(local_var fr) {
3354 TypedValue* oldTop = vmStack().topTV();
3355 TypedValue* newTop = vmStack().allocTV();
3356 memcpy(newTop, oldTop, sizeof *newTop);
3357 Cell* to = oldTop;
3358 cgetl_body(vmfp(), fr.ptr, to, fr.index, true);
3361 OPTBLD_INLINE void iopPushL(local_var locVal) {
3362 assertx(locVal->m_type != KindOfUninit);
3363 assertx(!isRefType(locVal->m_type));
3364 TypedValue* dest = vmStack().allocTV();
3365 *dest = *locVal;
3366 locVal->m_type = KindOfUninit;
3369 OPTBLD_INLINE void iopCGetG() {
3370 StringData* name;
3371 TypedValue* to = vmStack().topTV();
3372 TypedValue* fr = nullptr;
3373 lookup_gbl(vmfp(), name, to, fr);
3374 SCOPE_EXIT { decRefStr(name); };
3375 tvDecRefGen(to);
3376 if (fr == nullptr || fr->m_type == KindOfUninit) {
3377 tvWriteNull(*to);
3378 } else {
3379 cgetl_inner_body(fr, to);
3383 struct SpropState {
3384 SpropState(Stack&, bool ignoreLateInit);
3385 ~SpropState();
3386 StringData* name;
3387 Class* cls;
3388 TypedValue* output;
3389 TypedValue* val;
3390 TypedValue oldNameCell;
3391 Slot slot;
3392 bool visible;
3393 bool accessible;
3394 bool constant;
3395 Stack& vmstack;
3398 SpropState::SpropState(Stack& vmstack, bool ignoreLateInit) : vmstack{vmstack} {
3399 auto const clsCell = vmstack.topC();
3400 auto const nameCell = output = vmstack.indTV(1);
3401 if (!isClassType(clsCell->m_type)) {
3402 raise_error("SpropState: expected class");
3404 cls = clsCell->m_data.pclass;
3405 lookup_sprop(vmfp(), cls, name, nameCell, val,
3406 slot, visible, accessible, constant, ignoreLateInit);
3407 oldNameCell = *nameCell;
3410 SpropState::~SpropState() {
3411 vmstack.discard();
3412 decRefStr(name);
3413 tvDecRefGen(oldNameCell);
3416 OPTBLD_INLINE void iopCGetS() {
3417 SpropState ss(vmStack(), false);
3418 if (!(ss.visible && ss.accessible)) {
3419 raise_error("Invalid static property access: %s::%s",
3420 ss.cls->name()->data(),
3421 ss.name->data());
3423 cellDup(*tvToCell(ss.val), *ss.output);
3426 static inline MInstrState& initMState() {
3427 auto& mstate = vmMInstrState();
3428 tvWriteUninit(mstate.tvRef);
3429 tvWriteUninit(mstate.tvRef2);
3430 mstate.propState = MInstrPropState{};
3431 return mstate;
3434 static inline void baseGImpl(TypedValue* key, MOpMode mode) {
3435 auto& mstate = initMState();
3436 StringData* name;
3437 TypedValue* baseVal;
3439 if (mode == MOpMode::Define) lookupd_gbl(vmfp(), name, key, baseVal);
3440 else lookup_gbl(vmfp(), name, key, baseVal);
3441 SCOPE_EXIT { decRefStr(name); };
3443 if (baseVal == nullptr) {
3444 assertx(mode != MOpMode::Define);
3445 if (mode == MOpMode::Warn) throwArrayKeyException(name, false);
3446 tvWriteNull(mstate.tvTempBase);
3447 mstate.base = &mstate.tvTempBase;
3448 return;
3451 mstate.base = baseVal;
3454 OPTBLD_INLINE void iopBaseGC(uint32_t idx, MOpMode mode) {
3455 baseGImpl(vmStack().indTV(idx), mode);
3458 OPTBLD_INLINE void iopBaseGL(local_var loc, MOpMode mode) {
3459 baseGImpl(tvToCell(loc.ptr), mode);
3462 OPTBLD_INLINE void iopBaseSC(uint32_t keyIdx, uint32_t clsIdx, MOpMode mode) {
3463 auto& mstate = initMState();
3465 auto const clsCell = vmStack().indC(clsIdx);
3466 auto const key = vmStack().indTV(keyIdx);
3468 if (!isClassType(clsCell->m_type)) {
3469 raise_error("Attempting to obtain static base on non-class");
3471 auto const class_ = clsCell->m_data.pclass;
3473 auto const name = lookup_name(key);
3474 SCOPE_EXIT { decRefStr(name); };
3475 auto const lookup = class_->getSProp(arGetContextClass(vmfp()), name);
3476 if (!lookup.val || !lookup.accessible) {
3477 raise_error("Invalid static property access: %s::%s",
3478 class_->name()->data(),
3479 name->data());
3482 if (lookup.constant && (mode == MOpMode::Define ||
3483 mode == MOpMode::Unset || mode == MOpMode::InOut)) {
3484 throw_cannot_modify_static_const_prop(class_->name()->data(),
3485 name->data());
3488 if (RuntimeOption::EvalCheckPropTypeHints > 0 && mode == MOpMode::Define) {
3489 vmMInstrState().propState = MInstrPropState{class_, lookup.slot, true};
3492 mstate.base = tv_lval(lookup.val);
3495 OPTBLD_INLINE void baseLImpl(local_var loc, MOpMode mode) {
3496 auto& mstate = initMState();
3497 auto local = tvToCell(loc.ptr);
3498 if (mode == MOpMode::Warn && local->m_type == KindOfUninit) {
3499 raise_notice(Strings::UNDEFINED_VARIABLE,
3500 vmfp()->m_func->localVarName(loc.index)->data());
3502 mstate.base = local;
3505 OPTBLD_INLINE void iopBaseL(local_var loc, MOpMode mode) {
3506 baseLImpl(loc, mode);
3509 OPTBLD_INLINE void iopBaseC(uint32_t idx, MOpMode) {
3510 auto& mstate = initMState();
3511 mstate.base = vmStack().indC(idx);
3514 OPTBLD_INLINE void iopBaseH() {
3515 auto& mstate = initMState();
3516 mstate.tvTempBase = make_tv<KindOfObject>(vmfp()->getThis());
3517 mstate.base = &mstate.tvTempBase;
3520 static OPTBLD_INLINE void propDispatch(MOpMode mode, TypedValue key) {
3521 auto& mstate = vmMInstrState();
3522 auto pState = &mstate.propState;
3523 auto ctx = arGetContextClass(vmfp());
3525 auto const result = [&]{
3526 switch (mode) {
3527 case MOpMode::None:
3528 return Prop<MOpMode::None>(mstate.tvRef, ctx, mstate.base, key, pState);
3529 case MOpMode::Warn:
3530 return Prop<MOpMode::Warn>(mstate.tvRef, ctx, mstate.base, key, pState);
3531 case MOpMode::Define:
3532 return Prop<MOpMode::Define,KeyType::Any>(
3533 mstate.tvRef, ctx, mstate.base, key, pState
3535 case MOpMode::Unset:
3536 return Prop<MOpMode::Unset>(
3537 mstate.tvRef, ctx, mstate.base, key, pState
3539 case MOpMode::InOut:
3540 always_assert_flog(false, "MOpMode::InOut can only occur on Elem");
3542 always_assert(false);
3543 }();
3545 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3548 static OPTBLD_INLINE void propQDispatch(MOpMode mode, TypedValue key) {
3549 auto& mstate = vmMInstrState();
3550 auto ctx = arGetContextClass(vmfp());
3552 assertx(mode == MOpMode::None || mode == MOpMode::Warn);
3553 assertx(key.m_type == KindOfPersistentString);
3554 auto const result = nullSafeProp(mstate.tvRef, ctx, mstate.base,
3555 key.m_data.pstr);
3556 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3559 static OPTBLD_INLINE
3560 void elemDispatch(MOpMode mode, TypedValue key) {
3561 auto& mstate = vmMInstrState();
3562 auto const b = mstate.base;
3564 auto const result = [&]() -> tv_rval {
3565 switch (mode) {
3566 case MOpMode::None:
3567 return Elem<MOpMode::None>(mstate.tvRef, b, key);
3568 case MOpMode::Warn:
3569 return Elem<MOpMode::Warn>(mstate.tvRef, b, key);
3570 case MOpMode::InOut:
3571 return Elem<MOpMode::InOut>(mstate.tvRef, b, key);
3572 case MOpMode::Define:
3573 if (RuntimeOption::EvalArrayProvenance) {
3574 return ElemD<MOpMode::Define, KeyType::Any, true>(
3575 mstate.tvRef, b, key, &mstate.propState
3577 } else {
3578 return ElemD<MOpMode::Define, KeyType::Any, false>(
3579 mstate.tvRef, b, key, &mstate.propState
3582 case MOpMode::Unset:
3583 return ElemU(mstate.tvRef, b, key);
3585 always_assert(false);
3586 }().as_lval();
3588 if (mode == MOpMode::Define) mstate.propState = MInstrPropState{};
3589 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3592 static inline TypedValue key_tv(MemberKey key) {
3593 switch (key.mcode) {
3594 case MW:
3595 return TypedValue{};
3596 case MEL: case MPL: {
3597 auto local = tvToCell(frame_local(vmfp(), key.iva));
3598 if (local->m_type == KindOfUninit) {
3599 raise_undefined_local(vmfp(), key.iva);
3600 return make_tv<KindOfNull>();
3602 return *local;
3604 case MEC: case MPC:
3605 return *vmStack().indTV(key.iva);
3606 case MEI:
3607 return make_tv<KindOfInt64>(key.int64);
3608 case MET: case MPT: case MQT:
3609 return make_tv<KindOfPersistentString>(key.litstr);
3611 not_reached();
3614 static OPTBLD_INLINE void dimDispatch(MOpMode mode, MemberKey mk) {
3615 auto const key = key_tv(mk);
3616 if (mk.mcode == MQT) {
3617 propQDispatch(mode, key);
3618 } else if (mcodeIsProp(mk.mcode)) {
3619 propDispatch(mode, key);
3620 } else if (mcodeIsElem(mk.mcode)) {
3621 elemDispatch(mode, key);
3622 } else {
3623 if (mode == MOpMode::Warn) raise_error("Cannot use [] for reading");
3625 auto& mstate = vmMInstrState();
3626 auto const base = mstate.base;
3627 auto const result = [&] {
3628 return NewElem(mstate.tvRef, base, &mstate.propState);
3629 }();
3630 if (mode == MOpMode::Define) mstate.propState = MInstrPropState{};
3631 mstate.base = ratchetRefs(result, mstate.tvRef, mstate.tvRef2);
3635 OPTBLD_INLINE void iopDim(MOpMode mode, MemberKey mk) {
3636 dimDispatch(mode, mk);
3639 static OPTBLD_INLINE void mFinal(MInstrState& mstate,
3640 int32_t nDiscard,
3641 folly::Optional<TypedValue> result) {
3642 auto& stack = vmStack();
3643 for (auto i = 0; i < nDiscard; ++i) stack.popTV();
3644 if (result) tvCopy(*result, *stack.allocTV());
3646 tvDecRefGenUnlikely(mstate.tvRef);
3647 tvDecRefGenUnlikely(mstate.tvRef2);
3650 static OPTBLD_INLINE
3651 void queryMImpl(MemberKey mk, int32_t nDiscard, QueryMOp op) {
3652 auto const key = key_tv(mk);
3653 auto& mstate = vmMInstrState();
3654 TypedValue result;
3655 switch (op) {
3656 case QueryMOp::InOut:
3657 always_assert_flog(
3658 mcodeIsElem(mk.mcode), "QueryM InOut is only compatible with Elem"
3660 // fallthrough
3661 case QueryMOp::CGet:
3662 case QueryMOp::CGetQuiet:
3663 dimDispatch(getQueryMOpMode(op), mk);
3664 tvDup(*tvToCell(mstate.base), result);
3665 break;
3667 case QueryMOp::Isset:
3668 case QueryMOp::Empty:
3669 result.m_type = KindOfBoolean;
3670 if (mcodeIsProp(mk.mcode)) {
3671 auto const ctx = arGetContextClass(vmfp());
3672 result.m_data.num = op == QueryMOp::Empty
3673 ? IssetEmptyProp<true>(ctx, mstate.base, key)
3674 : IssetEmptyProp<false>(ctx, mstate.base, key);
3675 } else {
3676 assertx(mcodeIsElem(mk.mcode));
3678 result.m_data.num = op == QueryMOp::Empty
3679 ? IssetEmptyElem<true>(mstate.base, key)
3680 : IssetEmptyElem<false>(mstate.base, key);
3682 break;
3684 mFinal(mstate, nDiscard, result);
3687 OPTBLD_INLINE void iopQueryM(uint32_t nDiscard, QueryMOp subop, MemberKey mk) {
3688 queryMImpl(mk, nDiscard, subop);
3691 OPTBLD_FLT_INLINE void iopSetM(uint32_t nDiscard, MemberKey mk) {
3692 auto& mstate = vmMInstrState();
3693 auto const topC = vmStack().topC();
3695 if (mk.mcode == MW) {
3696 if (RuntimeOption::EvalArrayProvenance) {
3697 SetNewElem<true, true>(mstate.base, topC, &mstate.propState);
3698 } else {
3699 SetNewElem<true, false>(mstate.base, topC, &mstate.propState);
3701 } else {
3702 auto const key = key_tv(mk);
3703 if (mcodeIsElem(mk.mcode)) {
3704 auto const result = RuntimeOption::EvalArrayProvenance
3705 ? SetElem<true, true>(mstate.base, key, topC, &mstate.propState)
3706 : SetElem<true, false>(mstate.base, key, topC, &mstate.propState);
3708 if (result) {
3709 tvDecRefGen(topC);
3710 topC->m_type = KindOfString;
3711 topC->m_data.pstr = result;
3713 } else {
3714 auto const ctx = arGetContextClass(vmfp());
3715 SetProp<true>(ctx, mstate.base, key, topC, &mstate.propState);
3719 auto const result = *topC;
3720 vmStack().discard();
3721 mFinal(mstate, nDiscard, result);
3724 OPTBLD_INLINE void iopSetRangeM(
3725 uint32_t nDiscard, SetRangeOp op, uint32_t size
3727 auto& mstate = vmMInstrState();
3728 auto const count = tvCastToInt64(*vmStack().indC(0));
3729 auto const src = *vmStack().indC(1);
3730 auto const offset = tvCastToInt64(*vmStack().indC(2));
3732 if (op == SetRangeOp::Forward) {
3733 SetRange<false>(mstate.base, offset, src, count, size);
3734 } else {
3735 SetRange<true>(mstate.base, offset, src, count, size);
3738 mFinal(mstate, nDiscard + 3, folly::none);
3741 OPTBLD_INLINE void iopIncDecM(uint32_t nDiscard, IncDecOp subop, MemberKey mk) {
3742 auto const key = key_tv(mk);
3744 auto& mstate = vmMInstrState();
3745 Cell result;
3746 if (mcodeIsProp(mk.mcode)) {
3747 result = IncDecProp(
3748 arGetContextClass(vmfp()), subop, mstate.base, key, &mstate.propState
3750 } else if (mcodeIsElem(mk.mcode)) {
3751 result = IncDecElem(
3752 subop, mstate.base, key, &mstate.propState
3754 } else {
3755 result = IncDecNewElem(mstate.tvRef, subop, mstate.base, &mstate.propState);
3758 mFinal(mstate, nDiscard, result);
3761 OPTBLD_INLINE void iopSetOpM(uint32_t nDiscard, SetOpOp subop, MemberKey mk) {
3762 auto const key = key_tv(mk);
3763 auto const rhs = vmStack().topC();
3765 auto& mstate = vmMInstrState();
3766 tv_lval result;
3767 if (mcodeIsProp(mk.mcode)) {
3768 result = SetOpProp(mstate.tvRef, arGetContextClass(vmfp()), subop,
3769 mstate.base, key, rhs, &mstate.propState);
3770 } else if (mcodeIsElem(mk.mcode)) {
3771 result = SetOpElem(
3772 mstate.tvRef, subop, mstate.base, key, rhs, &mstate.propState
3774 } else {
3775 result =
3776 SetOpNewElem(mstate.tvRef, subop, mstate.base, rhs, &mstate.propState);
3779 vmStack().popC();
3780 result = tvToCell(result);
3781 tvIncRefGen(*result);
3782 mFinal(mstate, nDiscard, *result);
3785 OPTBLD_INLINE void iopUnsetM(uint32_t nDiscard, MemberKey mk) {
3786 auto const key = key_tv(mk);
3788 auto& mstate = vmMInstrState();
3789 if (mcodeIsProp(mk.mcode)) {
3790 UnsetProp(arGetContextClass(vmfp()), mstate.base, key);
3791 } else {
3792 assertx(mcodeIsElem(mk.mcode));
3793 UnsetElem(mstate.base, key);
3796 mFinal(mstate, nDiscard, folly::none);
3799 namespace {
3801 inline void checkThis(ActRec* fp) {
3802 if (!fp->func()->cls() || !fp->hasThis()) {
3803 raise_error(Strings::FATAL_NULL_THIS);
3807 OPTBLD_INLINE const Cell* memoGetImpl(LocalRange keys) {
3808 assertx(vmfp()->m_func->isMemoizeWrapper());
3809 assertx(keys.first + keys.count <= vmfp()->m_func->numLocals());
3811 for (auto i = 0; i < keys.count; ++i) {
3812 auto const key = frame_local(vmfp(), keys.first + i);
3813 if (!isIntType(key->m_type) && !isStringType(key->m_type)) {
3814 raise_error("Memoization keys can only be ints or strings");
3818 auto const c = [&] () -> const Cell* {
3819 auto const func = vmfp()->m_func;
3820 if (!func->isMethod() || func->isStatic()) {
3821 auto const lsbCls =
3822 func->isMemoizeWrapperLSB() ? vmfp()->getClass() : nullptr;
3823 if (keys.count > 0) {
3824 auto cache =
3825 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
3826 : rds::bindStaticMemoCache(func);
3827 if (!cache.isInit()) return nullptr;
3828 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
3829 if (auto getter = memoCacheGetForKeyCount(keys.count)) {
3830 return getter(*cache, keysBegin);
3832 return memoCacheGetGeneric(
3833 *cache,
3834 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
3835 keysBegin
3839 auto cache =
3840 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
3841 : rds::bindStaticMemoValue(func);
3842 return cache.isInit() ? cache.get() : nullptr;
3845 checkThis(vmfp());
3846 auto const this_ = vmfp()->getThis();
3847 auto const cls = func->cls();
3848 assertx(this_->instanceof(cls));
3849 assertx(cls->hasMemoSlots());
3851 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
3853 auto const slot = UNLIKELY(this_->hasNativeData())
3854 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
3855 : this_->memoSlot(memoInfo.first);
3857 if (keys.count == 0 && !memoInfo.second) {
3858 auto const val = slot->getValue();
3859 return val->m_type != KindOfUninit ? val : nullptr;
3862 auto const cache = slot->getCache();
3863 if (!cache) return nullptr;
3865 if (memoInfo.second) {
3866 if (keys.count == 0) {
3867 return memoCacheGetSharedOnly(
3868 cache,
3869 makeSharedOnlyKey(func->getFuncId())
3872 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
3873 if (auto const getter = sharedMemoCacheGetForKeyCount(keys.count)) {
3874 return getter(cache, func->getFuncId(), keysBegin);
3876 return memoCacheGetGeneric(
3877 cache,
3878 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
3879 keysBegin
3883 assertx(keys.count > 0);
3884 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
3885 if (auto const getter = memoCacheGetForKeyCount(keys.count)) {
3886 return getter(cache, keysBegin);
3888 return memoCacheGetGeneric(
3889 cache,
3890 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
3891 keysBegin
3893 }();
3895 assertx(!c || cellIsPlausible(*c));
3896 assertx(!c || c->m_type != KindOfUninit);
3897 return c;
3902 OPTBLD_INLINE void iopMemoGet(PC& pc, PC notfound, LocalRange keys) {
3903 if (auto const c = memoGetImpl(keys)) {
3904 cellDup(*c, *vmStack().allocC());
3905 } else {
3906 pc = notfound;
3910 OPTBLD_INLINE void iopMemoGetEager(PC& pc,
3911 PC notfound,
3912 PC suspended,
3913 LocalRange keys) {
3914 assertx(vmfp()->m_func->isAsyncFunction());
3915 assertx(!vmfp()->resumed());
3917 if (auto const c = memoGetImpl(keys)) {
3918 cellDup(*c, *vmStack().allocC());
3919 if (c->m_aux.u_asyncNonEagerReturnFlag) {
3920 assertx(tvIsObject(c) && c->m_data.pobj->isWaitHandle());
3921 pc = suspended;
3923 } else {
3924 pc = notfound;
3928 namespace {
3930 OPTBLD_INLINE void memoSetImpl(LocalRange keys, Cell val) {
3931 assertx(vmfp()->m_func->isMemoizeWrapper());
3932 assertx(keys.first + keys.count <= vmfp()->m_func->numLocals());
3933 assertx(cellIsPlausible(val));
3935 for (auto i = 0; i < keys.count; ++i) {
3936 auto const key = frame_local(vmfp(), keys.first + i);
3937 if (!isIntType(key->m_type) && !isStringType(key->m_type)) {
3938 raise_error("Memoization keys can only be ints or strings");
3942 auto const func = vmfp()->m_func;
3943 if (!func->isMethod() || func->isStatic()) {
3944 auto const lsbCls =
3945 func->isMemoizeWrapperLSB() ? vmfp()->getClass() : nullptr;
3946 if (keys.count > 0) {
3947 auto cache =
3948 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
3949 : rds::bindStaticMemoCache(func);
3950 if (!cache.isInit()) cache.initWith(nullptr);
3951 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
3952 if (auto setter = memoCacheSetForKeyCount(keys.count)) {
3953 return setter(*cache, keysBegin, val);
3955 return memoCacheSetGeneric(
3956 *cache,
3957 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
3958 keysBegin,
3963 auto cache =
3964 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
3965 : rds::bindStaticMemoValue(func);
3966 if (!cache.isInit()) {
3967 tvWriteUninit(*cache);
3968 cache.markInit();
3971 cellSetWithAux(val, *cache);
3972 return;
3975 checkThis(vmfp());
3976 auto const this_ = vmfp()->getThis();
3977 auto const cls = func->cls();
3978 assertx(this_->instanceof(cls));
3979 assertx(cls->hasMemoSlots());
3981 this_->setAttribute(ObjectData::UsedMemoCache);
3983 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
3985 auto slot = UNLIKELY(this_->hasNativeData())
3986 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
3987 : this_->memoSlot(memoInfo.first);
3989 if (keys.count == 0 && !memoInfo.second) {
3990 cellSetWithAux(val, *slot->getValue());
3991 return;
3994 auto& cache = slot->getCacheForWrite();
3996 if (memoInfo.second) {
3997 if (keys.count == 0) {
3998 return memoCacheSetSharedOnly(
3999 cache,
4000 makeSharedOnlyKey(func->getFuncId()),
4004 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
4005 if (auto const setter = sharedMemoCacheSetForKeyCount(keys.count)) {
4006 return setter(cache, func->getFuncId(), keysBegin, val);
4008 return memoCacheSetGeneric(
4009 cache,
4010 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
4011 keysBegin,
4016 assertx(keys.count > 0);
4017 auto const keysBegin = frame_local(vmfp(), keys.first + keys.count - 1);
4018 if (auto const setter = memoCacheSetForKeyCount(keys.count)) {
4019 return setter(cache, keysBegin, val);
4021 return memoCacheSetGeneric(
4022 cache,
4023 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
4024 keysBegin,
4031 OPTBLD_INLINE void iopMemoSet(LocalRange keys) {
4032 auto val = *vmStack().topC();
4033 assertx(val.m_type != KindOfUninit);
4034 if (vmfp()->m_func->isAsyncFunction()) {
4035 assertx(tvIsObject(val) && val.m_data.pobj->isWaitHandle());
4036 val.m_aux.u_asyncNonEagerReturnFlag = -1;
4038 memoSetImpl(keys, val);
4041 OPTBLD_INLINE void iopMemoSetEager(LocalRange keys) {
4042 assertx(vmfp()->m_func->isAsyncFunction());
4043 assertx(!vmfp()->resumed());
4044 auto val = *vmStack().topC();
4045 assertx(val.m_type != KindOfUninit);
4046 val.m_aux.u_asyncNonEagerReturnFlag = 0;
4047 memoSetImpl(keys, val);
4050 OPTBLD_INLINE void iopVGetL(local_var fr) {
4051 Ref* to = vmStack().allocV();
4052 if (!isRefType(fr->m_type)) {
4053 tvBox(*fr);
4055 refDup(*fr, *to);
4058 OPTBLD_INLINE void iopIssetG() {
4059 StringData* name;
4060 TypedValue* tv1 = vmStack().topTV();
4061 TypedValue* tv = nullptr;
4062 bool e;
4063 lookup_gbl(vmfp(), name, tv1, tv);
4064 SCOPE_EXIT { decRefStr(name); };
4065 if (tv == nullptr) {
4066 e = false;
4067 } else {
4068 e = !cellIsNull(tvToCell(tv));
4070 vmStack().replaceC<KindOfBoolean>(e);
4073 OPTBLD_INLINE void iopIssetS() {
4074 SpropState ss(vmStack(), true);
4075 bool e;
4076 if (!(ss.visible && ss.accessible)) {
4077 e = false;
4078 } else {
4079 e = !cellIsNull(tvToCell(ss.val));
4081 ss.output->m_data.num = e;
4082 ss.output->m_type = KindOfBoolean;
4085 OPTBLD_FLT_INLINE void iopIssetL(local_var tv) {
4086 bool ret = !is_null(tvToCell(tv.ptr));
4087 TypedValue* topTv = vmStack().allocTV();
4088 topTv->m_data.num = ret;
4089 topTv->m_type = KindOfBoolean;
4092 OPTBLD_INLINE static bool isTypeHelper(Cell* val, IsTypeOp op) {
4093 assertx(cellIsPlausible(*val));
4095 switch (op) {
4096 case IsTypeOp::Null: return is_null(val);
4097 case IsTypeOp::Bool: return is_bool(val);
4098 case IsTypeOp::Int: return is_int(val);
4099 case IsTypeOp::Dbl: return is_double(val);
4100 case IsTypeOp::Arr:
4101 if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsArrayNotices &&
4102 !vmfp()->m_func->isBuiltin())) {
4103 if (isArrayOrShapeType(val->m_type)) {
4104 return true;
4105 } else if (isVecType(val->m_type)) {
4106 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VEC_IS_ARR);
4107 } else if (isDictOrShapeType(val->m_type)) {
4108 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DICT_IS_ARR);
4109 } else if (isKeysetType(val->m_type)) {
4110 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_KEYSET_IS_ARR);
4112 return false;
4114 return is_array(val);
4115 case IsTypeOp::Vec: {
4116 if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
4117 if (isArrayType(val->m_type)) {
4118 if (val->m_data.parr->isVArray()) {
4119 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VARR_IS_VEC);
4121 return false;
4124 auto const ret = is_vec(val);
4125 if (ret && UNLIKELY(RuntimeOption::EvalLogArrayProvenance)) {
4126 raise_array_serialization_notice("is_vec", val->m_data.parr);
4128 return ret;
4130 case IsTypeOp::Dict: {
4131 if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
4132 if (isArrayOrShapeType(val->m_type)) {
4133 if (val->m_data.parr->isDArray()) {
4134 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DARR_IS_DICT);
4136 return false;
4139 auto const ret = is_dict(val);
4140 if (ret && UNLIKELY(RuntimeOption::EvalLogArrayProvenance)) {
4141 raise_array_serialization_notice("is_dict", val->m_data.parr);
4143 return ret;
4145 case IsTypeOp::Keyset: return is_keyset(val);
4146 case IsTypeOp::Obj: return is_object(val);
4147 case IsTypeOp::Str: return is_string(val);
4148 case IsTypeOp::Res: return val->m_type == KindOfResource;
4149 case IsTypeOp::Scalar: return HHVM_FN(is_scalar)(tvAsCVarRef(val));
4150 case IsTypeOp::ArrLike:
4151 if (isClsMethType(val->m_type)) {
4152 if (RuntimeOption::EvalIsVecNotices) {
4153 raise_notice(Strings::CLSMETH_COMPAT_IS_ANY_ARR);
4155 return true;
4157 return isArrayLikeType(val->m_type);
4158 case IsTypeOp::VArray:
4159 assertx(!RuntimeOption::EvalHackArrDVArrs);
4160 if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
4161 if (isVecType(val->m_type)) {
4162 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_VEC_IS_VARR);
4163 return false;
4166 return is_varray(val);
4167 case IsTypeOp::DArray:
4168 assertx(!RuntimeOption::EvalHackArrDVArrs);
4169 if (UNLIKELY(RuntimeOption::EvalHackArrCompatIsVecDictNotices)) {
4170 if (isDictType(val->m_type)) {
4171 raise_hackarr_compat_notice(Strings::HACKARR_COMPAT_DICT_IS_DARR);
4172 return false;
4175 return is_darray(val);
4176 case IsTypeOp::ClsMeth: return is_clsmeth(val);
4177 case IsTypeOp::Func: return is_fun(val);
4179 not_reached();
4182 OPTBLD_INLINE void iopIsTypeL(local_var loc, IsTypeOp op) {
4183 if (loc.ptr->m_type == KindOfUninit) {
4184 raise_undefined_local(vmfp(), loc.index);
4186 vmStack().pushBool(isTypeHelper(tvToCell(loc.ptr), op));
4189 OPTBLD_INLINE void iopIsTypeC(IsTypeOp op) {
4190 auto val = vmStack().topC();
4191 vmStack().replaceC(make_tv<KindOfBoolean>(isTypeHelper(val, op)));
4194 OPTBLD_FLT_INLINE void iopAssertRATL(local_var loc, RepoAuthType rat) {
4195 if (debug) {
4196 auto const tv = *loc.ptr;
4197 auto const func = vmfp()->func();
4198 auto vm = &*g_context;
4199 always_assert_flog(
4200 tvMatchesRepoAuthType(tv, rat),
4201 "failed assert RATL on local {}: ${} in {}:{}, expected {}, got {}",
4202 loc.index,
4203 loc.index < func->numNamedLocals() ?
4204 func->localNames()[loc.index]->data() : "<unnamed>",
4205 vm->getContainingFileName()->data(),
4206 vm->getLine(),
4207 show(rat),
4208 toStringElm(&tv)
4213 OPTBLD_INLINE void iopAssertRATStk(uint32_t stkSlot, RepoAuthType rat) {
4214 if (debug) {
4215 auto const tv = *vmStack().indTV(stkSlot);
4216 auto vm = &*g_context;
4217 always_assert_flog(
4218 tvMatchesRepoAuthType(tv, rat),
4219 "failed assert RATStk {} in {}:{}, expected {}, got {}",
4220 stkSlot,
4221 vm->getContainingFileName()->data(),
4222 vm->getLine(),
4223 show(rat),
4224 toStringElm(&tv)
4229 OPTBLD_INLINE void iopBreakTraceHint() {
4232 OPTBLD_INLINE void iopEmptyL(local_var loc) {
4233 bool e = !cellToBool(*tvToCell(loc.ptr));
4234 vmStack().pushBool(e);
4237 OPTBLD_INLINE void iopEmptyG() {
4238 StringData* name;
4239 TypedValue* tv1 = vmStack().topTV();
4240 TypedValue* tv = nullptr;
4241 bool e;
4242 lookup_gbl(vmfp(), name, tv1, tv);
4243 SCOPE_EXIT { decRefStr(name); };
4244 if (tv == nullptr) {
4245 e = true;
4246 } else {
4247 e = !cellToBool(*tvToCell(tv));
4249 vmStack().replaceC<KindOfBoolean>(e);
4252 OPTBLD_INLINE void iopEmptyS() {
4253 SpropState ss(vmStack(), true);
4254 bool e;
4255 if (!(ss.visible && ss.accessible)) {
4256 e = true;
4257 } else {
4258 e = !cellToBool(*tvToCell(ss.val));
4260 ss.output->m_data.num = e;
4261 ss.output->m_type = KindOfBoolean;
4264 OPTBLD_INLINE void iopAKExists() {
4265 TypedValue* arr = vmStack().topTV();
4266 TypedValue* key = arr + 1;
4267 bool result = HHVM_FN(array_key_exists)(tvAsCVarRef(key), tvAsCVarRef(arr));
4268 vmStack().popTV();
4269 vmStack().replaceTV<KindOfBoolean>(result);
4272 OPTBLD_INLINE void iopGetMemoKeyL(local_var loc) {
4273 DEBUG_ONLY auto const func = vmfp()->m_func;
4274 assertx(func->isMemoizeWrapper());
4275 assertx(!func->anyByRef());
4277 assertx(tvIsPlausible(*loc.ptr));
4279 if (UNLIKELY(loc.ptr->m_type == KindOfUninit)) {
4280 tvWriteNull(*loc.ptr);
4281 raise_undefined_local(vmfp(), loc.index);
4283 auto const cell = tvToCell(loc.ptr);
4285 // Use the generic scheme, which is performed by
4286 // serialize_memoize_param.
4287 auto const key = HHVM_FN(serialize_memoize_param)(*cell);
4288 cellCopy(key, *vmStack().allocC());
4291 namespace {
4292 const StaticString s_idx("hh\\idx");
4294 TypedValue genericIdx(TypedValue obj, TypedValue key, TypedValue def) {
4295 static auto func = Unit::loadFunc(s_idx.get());
4296 assertx(func != nullptr);
4297 TypedValue args[] = {
4298 obj,
4299 key,
4302 return g_context->invokeFuncFew(func, nullptr, nullptr, 3, &args[0]);
4306 OPTBLD_INLINE void iopIdx() {
4307 TypedValue* def = vmStack().topTV();
4308 TypedValue* key = vmStack().indTV(1);
4309 TypedValue* arr = vmStack().indTV(2);
4311 TypedValue result;
4312 if (isArrayLikeType(arr->m_type)) {
4313 result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
4314 tvAsCVarRef(key),
4315 tvAsCVarRef(def));
4316 vmStack().popTV();
4317 } else if (isNullType(key->m_type)) {
4318 tvDecRefGen(arr);
4319 *arr = *def;
4320 vmStack().ndiscard(2);
4321 return;
4322 } else if (!isStringType(arr->m_type) &&
4323 arr->m_type != KindOfObject) {
4324 result = *def;
4325 vmStack().discard();
4326 } else {
4327 result = genericIdx(*arr, *key, *def);
4328 vmStack().popTV();
4330 vmStack().popTV();
4331 tvDecRefGen(arr);
4332 *arr = result;
4335 OPTBLD_INLINE void iopArrayIdx() {
4336 TypedValue* def = vmStack().topTV();
4337 TypedValue* key = vmStack().indTV(1);
4338 TypedValue* arr = vmStack().indTV(2);
4339 if (isClsMethType(type(arr))) {
4340 if (RuntimeOption::EvalHackArrDVArrs) {
4341 tvCastToVecInPlace(arr);
4342 } else {
4343 tvCastToVArrayInPlace(arr);
4346 auto const result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
4347 tvAsCVarRef(key),
4348 tvAsCVarRef(def));
4349 vmStack().popTV();
4350 vmStack().popTV();
4351 tvDecRefGen(arr);
4352 *arr = result;
4355 OPTBLD_INLINE void iopSetL(local_var to) {
4356 assertx(to.index < vmfp()->m_func->numLocals());
4357 Cell* fr = vmStack().topC();
4358 tvSet(*fr, *to);
4361 OPTBLD_INLINE void iopSetG() {
4362 StringData* name;
4363 Cell* fr = vmStack().topC();
4364 TypedValue* tv2 = vmStack().indTV(1);
4365 TypedValue* to = nullptr;
4366 lookupd_gbl(vmfp(), name, tv2, to);
4367 SCOPE_EXIT { decRefStr(name); };
4368 assertx(to != nullptr);
4369 tvSet(*fr, *to);
4370 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
4371 vmStack().discard();
4374 OPTBLD_INLINE void iopSetS() {
4375 TypedValue* tv1 = vmStack().topTV();
4376 Cell* clsCell = vmStack().indC(1);
4377 TypedValue* propn = vmStack().indTV(2);
4378 TypedValue* output = propn;
4379 StringData* name;
4380 TypedValue* val;
4381 bool visible, accessible, constant;
4382 Slot slot;
4384 if (!isClassType(clsCell->m_type)) {
4385 raise_error("Attempting static property access on non class");
4387 auto const cls = clsCell->m_data.pclass;
4389 lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
4390 accessible, constant, true);
4391 SCOPE_EXIT { decRefStr(name); };
4392 if (!(visible && accessible)) {
4393 raise_error("Invalid static property access: %s::%s",
4394 cls->name()->data(),
4395 name->data());
4397 if (constant) {
4398 throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
4400 if (RuntimeOption::EvalCheckPropTypeHints > 0) {
4401 auto const& sprop = cls->staticProperties()[slot];
4402 auto const& tc = sprop.typeConstraint;
4403 if (tc.isCheckable()) tc.verifyStaticProperty(tv1, cls, sprop.cls, name);
4405 tvSet(*tv1, *val);
4406 tvDecRefGen(propn);
4407 memcpy(output, tv1, sizeof(TypedValue));
4408 vmStack().ndiscard(2);
4411 OPTBLD_INLINE void iopSetOpL(local_var loc, SetOpOp op) {
4412 Cell* fr = vmStack().topC();
4413 Cell* to = tvToCell(loc.ptr);
4414 setopBody(to, op, fr);
4415 tvDecRefGen(fr);
4416 cellDup(*to, *fr);
4419 OPTBLD_INLINE void iopSetOpG(SetOpOp op) {
4420 StringData* name;
4421 Cell* fr = vmStack().topC();
4422 TypedValue* tv2 = vmStack().indTV(1);
4423 TypedValue* to = nullptr;
4424 // XXX We're probably not getting warnings totally correct here
4425 lookupd_gbl(vmfp(), name, tv2, to);
4426 SCOPE_EXIT { decRefStr(name); };
4427 assertx(to != nullptr);
4428 setopBody(tvToCell(to), op, fr);
4429 tvDecRefGen(fr);
4430 tvDecRefGen(tv2);
4431 cellDup(*tvToCell(to), *tv2);
4432 vmStack().discard();
4435 OPTBLD_INLINE void iopSetOpS(SetOpOp op) {
4436 Cell* fr = vmStack().topC();
4437 Cell* clsCell = vmStack().indC(1);
4438 TypedValue* propn = vmStack().indTV(2);
4439 TypedValue* output = propn;
4440 StringData* name;
4441 TypedValue* val;
4442 bool visible, accessible, constant;
4443 Slot slot;
4445 if (!isClassType(clsCell->m_type)) {
4446 raise_error("Attempting static property access on non class");
4448 auto const cls = clsCell->m_data.pclass;
4450 lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
4451 accessible, constant, false);
4452 SCOPE_EXIT { decRefStr(name); };
4453 if (!(visible && accessible)) {
4454 raise_error("Invalid static property access: %s::%s",
4455 cls->name()->data(),
4456 name->data());
4458 if (constant) {
4459 throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
4461 val = tvToCell(val);
4462 auto const& sprop = cls->staticProperties()[slot];
4463 if (setOpNeedsTypeCheck(sprop.typeConstraint, op, val)) {
4464 Cell temp;
4465 cellDup(*val, temp);
4466 SCOPE_FAIL { tvDecRefGen(&temp); };
4467 setopBody(&temp, op, fr);
4468 sprop.typeConstraint.verifyStaticProperty(
4469 &temp, cls, sprop.cls, name
4471 cellMove(temp, *val);
4472 } else {
4473 setopBody(val, op, fr);
4476 tvDecRefGen(propn);
4477 tvDecRefGen(fr);
4478 cellDup(*val, *output);
4479 vmStack().ndiscard(2);
4482 OPTBLD_INLINE void iopIncDecL(local_var fr, IncDecOp op) {
4483 TypedValue* to = vmStack().allocTV();
4484 tvWriteUninit(*to);
4485 if (UNLIKELY(fr.ptr->m_type == KindOfUninit)) {
4486 raise_undefined_local(vmfp(), fr.index);
4487 tvWriteNull(*fr.ptr);
4488 } else {
4489 fr.ptr = tvToCell(fr.ptr);
4491 cellCopy(IncDecBody(op, fr.ptr), *to);
4494 OPTBLD_INLINE void iopIncDecG(IncDecOp op) {
4495 StringData* name;
4496 TypedValue* nameCell = vmStack().topTV();
4497 TypedValue* gbl = nullptr;
4498 lookupd_gbl(vmfp(), name, nameCell, gbl);
4499 auto oldNameCell = *nameCell;
4500 SCOPE_EXIT {
4501 decRefStr(name);
4502 tvDecRefGen(oldNameCell);
4504 assertx(gbl != nullptr);
4505 cellCopy(IncDecBody(op, tvToCell(gbl)), *nameCell);
4508 OPTBLD_INLINE void iopIncDecS(IncDecOp op) {
4509 SpropState ss(vmStack(), false);
4510 if (!(ss.visible && ss.accessible)) {
4511 raise_error("Invalid static property access: %s::%s",
4512 ss.cls->name()->data(),
4513 ss.name->data());
4515 if (ss.constant) {
4516 throw_cannot_modify_static_const_prop(ss.cls->name()->data(),
4517 ss.name->data());
4519 auto const checkable_sprop = [&]() -> const Class::SProp* {
4520 if (RuntimeOption::EvalCheckPropTypeHints <= 0) return nullptr;
4521 auto const& sprop = ss.cls->staticProperties()[ss.slot];
4522 return sprop.typeConstraint.isCheckable() ? &sprop : nullptr;
4523 }();
4525 auto const val = tvToCell(ss.val);
4526 if (checkable_sprop) {
4527 Cell temp;
4528 cellDup(*val, temp);
4529 SCOPE_FAIL { tvDecRefGen(&temp); };
4530 auto result = IncDecBody(op, &temp);
4531 SCOPE_FAIL { tvDecRefGen(&result); };
4532 checkable_sprop->typeConstraint.verifyStaticProperty(
4533 &temp,
4534 ss.cls,
4535 checkable_sprop->cls,
4536 ss.name
4538 cellMove(temp, *val);
4539 cellCopy(result, *ss.output);
4540 } else {
4541 cellCopy(IncDecBody(op, val), *ss.output);
4545 OPTBLD_INLINE void iopUnsetL(local_var loc) {
4546 tvUnset(*loc.ptr);
4549 OPTBLD_INLINE void iopUnsetG() {
4550 TypedValue* tv1 = vmStack().topTV();
4551 StringData* name = lookup_name(tv1);
4552 SCOPE_EXIT { decRefStr(name); };
4553 VarEnv* varEnv = g_context->m_globalVarEnv;
4554 assertx(varEnv != nullptr);
4555 varEnv->unset(name);
4556 vmStack().popC();
4559 bool doFCall(ActRec* ar, uint32_t numArgs, bool unpack) {
4560 TRACE(3, "FCall: pc %p func %p base %d\n", vmpc(),
4561 vmfp()->unit()->entry(),
4562 int(vmfp()->func()->base()));
4564 try {
4565 if (unpack) {
4566 Cell* c1 = vmStack().topC();
4567 if (UNLIKELY(!isContainer(*c1))) {
4568 Cell tmp = *c1;
4569 // argument_unpacking RFC dictates "containers and Traversables"
4570 raise_warning_unsampled("Only containers may be unpacked");
4571 *c1 = make_persistent_array_like_tv(staticEmptyVArray());
4572 tvDecRefGen(&tmp);
4575 Cell args = *c1;
4576 vmStack().discard(); // prepareArrayArgs will push args onto the stack
4577 SCOPE_EXIT { tvDecRefGen(&args); };
4578 checkStack(vmStack(), ar->func(), 0);
4580 assertx(!ar->resumed());
4581 prepareArrayArgs(ar, args, vmStack(), numArgs, /* checkRefAnnot */ true);
4584 prepareFuncEntry(
4586 unpack ? StackArgsState::Trimmed : StackArgsState::Untrimmed);
4587 } catch (...) {
4588 // If the callee's frame is still pre-live, free it explicitly.
4589 if (ar->m_sfp == vmfp()) {
4590 assertx(vmStack().top() <= (void*)ar);
4591 while (vmStack().top() != (void*)ar) {
4592 vmStack().popTV();
4594 vmStack().popAR();
4596 throw;
4599 if (UNLIKELY(!EventHook::FunctionCall(ar, EventHook::NormalFunc))) {
4600 return false;
4602 checkForReifiedGenericsErrors(ar);
4603 calleeDynamicCallChecks(ar);
4604 checkForRequiredCallM(ar);
4605 return true;
4608 namespace {
4610 ArrayData* getReifiedGenerics(const Func* func, const StringData* funcName,
4611 Array&& tsList) {
4612 if (!func->hasReifiedGenerics()) return nullptr;
4613 if (tsList.get()) {
4614 // As long as a tsList is passed, we'll use that over reading it from the
4615 // method name. The array-data passed on the stack may not be static.
4616 return ArrayData::GetScalarArray(std::move(tsList));
4618 if (isReifiedName(funcName)) {
4619 return getReifiedTypeList(stripClsOrFnNameFromReifiedName(funcName));
4621 raise_error(Strings::REIFIED_GENERICS_NOT_GIVEN, func->fullName()->data());
4624 template<bool dynamic, class InitActRec>
4625 void fcallImpl(PC origpc, PC& pc, const FCallArgs& fca, const Func* func,
4626 ArrayData* reifiedGenerics, InitActRec initActRec) {
4627 if (fca.enforceReffiness()) callerReffinessChecks(func, fca);
4628 if (dynamic) callerDynamicCallChecks(func);
4629 callerRxChecks(vmfp(), func);
4630 checkStack(vmStack(), func, 0);
4632 assertx(kNumActRecCells == 3);
4633 ActRec* ar = vmStack().indA(fca.numArgsInclUnpack());
4634 ar->m_func = func;
4635 ar->initNumArgs(fca.numArgsInclUnpack());
4636 if (dynamic) ar->setDynamicCall();
4637 if (fca.numRets != 1) ar->setFCallM();
4638 auto const asyncEagerReturn =
4639 fca.asyncEagerOffset != kInvalidOffset && func->supportsAsyncEagerReturn();
4640 if (asyncEagerReturn) ar->setAsyncEagerReturn();
4641 ar->setReturn(vmfp(), origpc, jit::tc::ustubs().retHelper);
4642 ar->trashVarEnv();
4643 if (reifiedGenerics != nullptr) ar->setReifiedGenerics(reifiedGenerics);
4645 initActRec(ar);
4647 doFCall(ar, fca.numArgs, fca.hasUnpack());
4648 pc = vmpc();
4651 OPTBLD_INLINE ActRec* fPushFuncImpl(
4652 const Func* func, int numArgs, ArrayData* reifiedGenerics
4654 assertx(kNumActRecCells == 3);
4655 ActRec* ar = vmStack().indA(numArgs);
4656 ar->m_func = func;
4657 ar->initNumArgs(numArgs);
4658 ar->trashVarEnv();
4659 if (reifiedGenerics != nullptr) ar->setReifiedGenerics(reifiedGenerics);
4660 return ar;
4663 ALWAYS_INLINE std::string concat_arg_list(imm_array<uint32_t> args) {
4664 auto const n = args.size;
4665 assertx(n != 0);
4666 std::string ret;
4667 folly::toAppend(args[0], &ret);
4668 for (int i = 1; i != n; ++i) folly::toAppend(";", args[i], &ret);
4669 return ret;
4672 } // namespace
4674 OPTBLD_INLINE void iopResolveFunc(Id id) {
4675 auto unit = vmfp()->m_func->unit();
4676 auto const nep = unit->lookupNamedEntityPairId(id);
4677 auto func = Unit::loadFunc(nep.second, nep.first);
4678 if (func == nullptr) raise_resolve_undefined(unit->lookupLitstrId(id));
4679 vmStack().pushFunc(func);
4682 OPTBLD_INLINE void iopFPushFunc(uint32_t numArgs, imm_array<uint32_t> args) {
4683 auto const n = args.size;
4684 std::string arglist;
4685 if (UNLIKELY(n)) {
4686 arglist = concat_arg_list(args);
4689 Cell* c1 = vmStack().topC();
4690 if (c1->m_type == KindOfObject) {
4691 // this covers both closures and functors
4692 static StringData* invokeName = makeStaticString("__invoke");
4693 ObjectData* origObj = c1->m_data.pobj;
4694 const Class* cls = origObj->getVMClass();
4695 auto const func = LIKELY(!n)
4696 ? cls->lookupMethod(invokeName)
4697 : cls->lookupMethod(
4698 makeStaticString(folly::sformat("__invoke${}$inout", arglist))
4700 if (func == nullptr) {
4701 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4704 callerRxChecks(vmfp(), func);
4706 vmStack().discard();
4707 ActRec* ar = fPushFuncImpl(func, numArgs, nullptr);
4708 if (func->isStaticInPrologue()) {
4709 ar->setClass(origObj->getVMClass());
4710 decRefObj(origObj);
4711 } else {
4712 ar->setThis(origObj);
4713 // Teleport the reference from the destroyed stack cell to the
4714 // ActRec. Don't try this at home.
4716 return;
4719 auto appendSuffix = [&] (const StringData* s) {
4720 return StringData::Make(s, folly::sformat("${}$inout", arglist));
4723 if (isArrayLikeType(c1->m_type) || isStringType(c1->m_type)) {
4724 Variant v = Variant::wrap(*c1);
4726 auto wrapInOutName = [&] (Cell* c, const StringData* mth) {
4727 VArrayInit ai{2};
4728 ai.append(c->m_data.parr->at(int64_t(0)));
4729 ai.append(Variant::attach(appendSuffix(mth)));
4730 return ai.toVariant();
4733 // Handle inout name mangling
4734 if (UNLIKELY(n)) {
4735 if (isStringType(c1->m_type)) {
4736 v = Variant::attach(appendSuffix(c1->m_data.pstr));
4737 } else if (c1->m_data.parr->size() == 2){
4738 auto s = c1->m_data.parr->at(1);
4739 if (isStringType(s.m_type)) {
4740 v = wrapInOutName(c1, s.m_data.pstr);
4741 } else if (isFuncType(s.m_type)) {
4742 v = wrapInOutName(c1, s.m_data.pfunc->fullDisplayName());
4747 // support:
4748 // array($instance, 'method')
4749 // array('Class', 'method'),
4750 // vec[$instance, 'method'],
4751 // vec['Class', 'method'],
4752 // array(Class*, Func*),
4753 // array(ObjectData*, Func*),
4754 // Func*,
4755 // 'func_name'
4756 // 'class::method'
4757 // which are all valid callables
4758 auto origCell = *c1;
4759 ObjectData* thiz = nullptr;
4760 HPHP::Class* cls = nullptr;
4761 StringData* invName = nullptr;
4762 bool dynamic = false;
4763 ArrayData* reifiedGenerics = nullptr;
4765 auto const func = vm_decode_function(
4767 vmfp(),
4768 thiz,
4769 cls,
4770 invName,
4771 dynamic,
4772 reifiedGenerics,
4773 DecodeFlags::NoWarn
4775 assertx(dynamic);
4776 if (func == nullptr) {
4777 if (isArrayLikeType(origCell.m_type)) {
4778 raise_error("Invalid callable (array)");
4779 } else {
4780 assertx(isStringType(origCell.m_type));
4781 raise_call_to_undefined(origCell.m_data.pstr);
4785 callerDynamicCallChecks(func);
4786 callerRxChecks(vmfp(), func);
4788 vmStack().discard();
4789 auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
4790 if (thiz) {
4791 thiz->incRefCount();
4792 ar->setThis(thiz);
4793 } else if (cls) {
4794 ar->setClass(cls);
4795 } else {
4796 ar->trashThis();
4799 ar->setDynamicCall();
4801 if (UNLIKELY(invName != nullptr)) {
4802 ar->setMagicDispatch(invName);
4804 if (isArrayLikeType(origCell.m_type)) {
4805 decRefArr(origCell.m_data.parr);
4806 } else if (origCell.m_type == KindOfString) {
4807 decRefStr(origCell.m_data.pstr);
4809 return;
4812 if (c1->m_type == KindOfFunc) {
4813 const Func* func = c1->m_data.pfunc;
4814 assertx(func != nullptr);
4815 if (func->cls()) {
4816 raise_error(Strings::CALL_ILLFORMED_FUNC);
4818 ArrayData* reifiedGenerics = nullptr;
4820 // Handle inout name mangling
4821 if (UNLIKELY(n)) {
4822 auto const func_name = func->fullDisplayName();
4823 auto const v = Variant::attach(appendSuffix(func_name));
4824 ObjectData* thiz = nullptr;
4825 Class* cls = nullptr;
4826 StringData* invName = nullptr;
4827 bool dynamic = false;
4828 func = vm_decode_function(
4830 vmfp(),
4831 thiz,
4832 cls,
4833 invName,
4834 dynamic,
4835 reifiedGenerics,
4836 DecodeFlags::NoWarn
4838 if (func == nullptr) raise_call_to_undefined(func_name);
4841 callerRxChecks(vmfp(), func);
4843 vmStack().discard();
4844 auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
4845 ar->trashThis();
4846 return;
4849 if (isClsMethType(c1->m_type)) {
4850 auto const clsMeth = c1->m_data.pclsmeth;
4851 assertx(clsMeth->getCls());
4852 assertx(clsMeth->getFunc());
4854 ArrayData* reifiedGenerics = nullptr;
4855 const Func* func = clsMeth->getFunc();
4856 ObjectData* thiz = nullptr;
4857 Class* cls = clsMeth->getCls();
4859 // Handle inout name mangling
4860 if (UNLIKELY(n)) {
4861 auto const func_name = func->fullDisplayName();
4862 auto const v = Variant::attach(appendSuffix(func_name));
4863 bool dynamic = false;
4864 StringData* invName = nullptr;
4865 func = vm_decode_function(
4867 vmfp(),
4868 thiz,
4869 cls,
4870 invName,
4871 dynamic,
4872 reifiedGenerics,
4873 DecodeFlags::NoWarn
4875 if (func == nullptr) raise_call_to_undefined(func_name);
4878 callerRxChecks(vmfp(), func);
4880 vmStack().popC();
4881 auto const ar = fPushFuncImpl(func, numArgs, reifiedGenerics);
4882 if (thiz) {
4883 ar->setThis(thiz);
4884 } else if (cls) {
4885 ar->setClass(cls);
4886 } else {
4887 ar->trashThis();
4890 return;
4893 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4896 namespace {
4898 void fPushFuncDImpl(uint32_t numArgs, Id id, ArrayData* tsList) {
4899 const NamedEntityPair nep =
4900 vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
4901 Func* func = Unit::loadFunc(nep.second, nep.first);
4902 if (func == nullptr) {
4903 raise_call_to_undefined(vmfp()->m_func->unit()->lookupLitstrId(id));
4906 callerRxChecks(vmfp(), func);
4908 ActRec* ar = fPushFuncImpl(func, numArgs, tsList);
4909 ar->trashThis();
4912 } // namespace
4914 OPTBLD_FLT_INLINE void iopFPushFuncD(uint32_t numArgs, Id id) {
4915 fPushFuncDImpl(numArgs, id, nullptr);
4918 OPTBLD_FLT_INLINE void iopFPushFuncRD(uint32_t numArgs, Id id) {
4919 auto const tsList = *vmStack().topC();
4920 assertx(tvIsVecOrVArray(tsList));
4921 // no need to decref since it will will be stored on actrec
4922 vmStack().discard();
4923 fPushFuncDImpl(numArgs, id, tsList.m_data.parr);
4926 namespace {
4928 template<bool dynamic>
4929 void fcallObjMethodImpl(PC origpc, PC& pc, const FCallArgs& fca,
4930 StringData* methName, Array&& tsList) {
4931 auto const numArgs = fca.numArgsInclUnpack();
4932 const Func* func;
4933 LookupResult res;
4934 assertx(tvIsObject(vmStack().indC(numArgs + 2)));
4935 auto const obj = vmStack().indC(numArgs + 2)->m_data.pobj;
4936 auto cls = obj->getVMClass();
4937 // if lookup throws, obj will be decref'd via stack
4938 res = lookupObjMethod(
4939 func, cls, methName, arGetContextClass(vmfp()), true);
4940 assertx(func);
4941 if (res == LookupResult::MethodFoundNoThis) {
4942 throw_has_this_need_static(func);
4944 assertx(res == LookupResult::MethodFoundWithThis ||
4945 res == LookupResult::MagicCallFound);
4947 auto const reifiedGenerics = getReifiedGenerics(
4948 func, methName, std::move(tsList));
4950 fcallImpl<dynamic>(origpc, pc, fca, func, reifiedGenerics, [&] (ActRec* ar) {
4951 /* Transfer ownership of obj to the ActRec*/
4952 ar->setThis(obj);
4954 if (res == LookupResult::MagicCallFound) {
4955 assertx(!func->hasReifiedGenerics());
4956 ar->setMagicDispatch(methName);
4957 } else {
4958 decRefStr(methName);
4963 static void raise_resolve_non_object(const char* methodName,
4964 const char* typeName = nullptr) {
4965 auto const msg = folly::sformat(
4966 "Cannot resolve a member function {}() on a non-object ({})",
4967 methodName, typeName
4970 raise_fatal_error(msg.c_str());
4973 static void throw_call_non_object(const char* methodName,
4974 const char* typeName = nullptr) {
4975 std::string msg;
4976 folly::format(&msg, "Call to a member function {}() on a non-object ({})",
4977 methodName, typeName);
4979 if (RuntimeOption::ThrowExceptionOnBadMethodCall) {
4980 SystemLib::throwBadMethodCallExceptionObject(String(msg));
4982 raise_fatal_error(msg.c_str());
4985 ALWAYS_INLINE StringData* mangleInOutName(
4986 const StringData* name,
4987 imm_array<uint32_t> args
4989 return
4990 StringData::Make(
4991 name, folly::sformat("${}$inout", concat_arg_list(args))
4995 ALWAYS_INLINE bool
4996 fcallObjMethodHandleInput(const FCallArgs& fca, ObjMethodOp op,
4997 const StringData* methName, bool extraStk) {
4998 Cell* obj = vmStack().indC(fca.numArgsInclUnpack() + 2 + (extraStk ? 1 : 0));
4999 if (LIKELY(isObjectType(obj->m_type))) return false;
5001 if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(obj->m_type))) {
5002 auto const dataTypeStr = getDataTypeString(obj->m_type).get();
5003 throw_call_non_object(methName->data(), dataTypeStr->data());
5006 // null?->method(...), pop extra stack input, all arguments and two uninits,
5007 // the null "object" and all uninits for inout returns, then push null.
5008 auto& stack = vmStack();
5009 if (extraStk) stack.popC();
5010 if (fca.hasUnpack()) stack.popC();
5011 for (uint32_t i = 0; i < fca.numArgs; ++i) stack.popTV();
5012 stack.popU();
5013 stack.popU();
5014 stack.popC();
5015 for (uint32_t i = 0; i < fca.numRets - 1; ++i) stack.popU();
5016 stack.pushNull();
5018 // Handled.
5019 return true;
5022 } // namespace
5024 OPTBLD_INLINE void
5025 iopFCallObjMethod(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5026 ObjMethodOp op, imm_array<uint32_t> args) {
5027 Cell* c1 = vmStack().topC(); // Method name.
5028 if (!isStringType(c1->m_type)) {
5029 raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
5032 StringData* methName = c1->m_data.pstr;
5033 if (fcallObjMethodHandleInput(fca, op, methName, true)) return;
5035 if (UNLIKELY(args.size)) {
5036 String s = String::attach(methName);
5037 methName = mangleInOutName(methName, args);
5040 // We handle decReffing method name in fcallObjMethodImpl
5041 vmStack().discard();
5042 fcallObjMethodImpl<true>(origpc, pc, fca, methName, Array());
5045 OPTBLD_INLINE void
5046 iopFCallObjMethodD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5047 ObjMethodOp op, const StringData* methName) {
5048 if (fcallObjMethodHandleInput(fca, op, methName, false)) return;
5049 auto const methNameC = const_cast<StringData*>(methName);
5050 fcallObjMethodImpl<false>(origpc, pc, fca, methNameC, Array());
5053 OPTBLD_INLINE void
5054 iopFCallObjMethodRD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5055 ObjMethodOp op, const StringData* methName) {
5056 if (fcallObjMethodHandleInput(fca, op, methName, true)) return;
5057 assertx(tvIsVecOrVArray(vmStack().topC()));
5058 auto const methNameC = const_cast<StringData*>(methName);
5059 auto tsList = Array::attach(vmStack().topC()->m_data.parr);
5060 vmStack().discard();
5061 fcallObjMethodImpl<false>(origpc, pc, fca, methNameC, std::move(tsList));
5064 namespace {
5065 void resolveMethodImpl(Cell* c1, Cell* c2) {
5066 auto name = c1->m_data.pstr;
5067 ObjectData* thiz = nullptr;
5068 HPHP::Class* cls = nullptr;
5069 StringData* invName = nullptr;
5070 bool dynamic = false;
5071 ArrayData* reifiedGenerics = nullptr;
5072 auto arr = make_varray(cellAsVariant(*c2), cellAsVariant(*c1));
5073 auto const func = vm_decode_function(
5074 Variant{arr},
5075 vmfp(),
5076 thiz,
5077 cls,
5078 invName,
5079 dynamic,
5080 reifiedGenerics,
5081 DecodeFlags::NoWarn
5083 assertx(dynamic);
5084 if (!func) raise_error("Failure to resolve method name \'%s\'", name->data());
5085 if (invName) {
5086 SystemLib::throwInvalidOperationExceptionObject(
5087 "Unable to resolve magic call for inst_meth()");
5089 if (thiz) {
5090 assertx(isObjectType(type(c2)));
5091 assertx(!(func->attrs() & AttrStatic));
5092 assertx(val(c2).pobj == thiz);
5093 } else {
5094 assertx(cls);
5095 assertx(func->attrs() & AttrStatic);
5096 arr.set(0, Variant{cls});
5098 arr.set(1, Variant{func});
5099 vmStack().popC();
5100 vmStack().popC();
5101 if (RuntimeOption::EvalHackArrDVArrs) {
5102 vmStack().pushVecNoRc(arr.detach());
5103 } else {
5104 vmStack().pushArrayNoRc(arr.detach());
5109 OPTBLD_INLINE void iopResolveClsMethod() {
5110 Cell* func = vmStack().topC();
5111 Cell* cls = vmStack().indC(1);
5112 if (!isStringType(func->m_type) || !isStringType(cls->m_type)) {
5113 raise_error(!isStringType(func->m_type) ?
5114 Strings::METHOD_NAME_MUST_BE_STRING : "class name must be a string.");
5117 StringData* invName = nullptr;
5118 auto const decoded_func = decode_for_clsmeth(
5119 StrNR{val(cls).pstr}, StrNR{val(func).pstr}, vmfp(), invName,
5120 DecodeFlags::NoWarn);
5121 if (!decoded_func.first || !decoded_func.second) {
5122 if (!decoded_func.first) {
5123 raise_error("Failure to resolve class name \'%s\'",
5124 val(cls).pstr->data());
5125 } else {
5126 raise_error(
5127 "Failure to resolve method name \'%s::%s\'",
5128 decoded_func.first->name()->data(), val(func).pstr->data());
5131 if (invName) {
5132 SystemLib::throwInvalidOperationExceptionObject(
5133 "Unable to resolve magic call for class_meth()");
5136 ClsMethDataRef clsMeth =
5137 ClsMethDataRef::create(decoded_func.first, decoded_func.second);
5138 vmStack().popC();
5139 vmStack().popC();
5140 vmStack().pushClsMethNoRc(clsMeth);
5143 OPTBLD_INLINE void iopResolveObjMethod() {
5144 Cell* c1 = vmStack().topC();
5145 Cell* c2 = vmStack().indC(1);
5146 if (!isStringType(c1->m_type)) {
5147 raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
5149 auto name = c1->m_data.pstr;
5150 if (!isObjectType(c2->m_type)) {
5151 raise_resolve_non_object(name->data(),
5152 getDataTypeString(c2->m_type).get()->data());
5154 resolveMethodImpl(c1, c2);
5157 namespace {
5159 template<bool dynamic>
5160 void fcallClsMethodImpl(PC origpc, PC& pc, const FCallArgs& fca, Class* cls,
5161 StringData* methName, bool forwarding, Array&& tsList) {
5162 auto const ctx = liveClass();
5163 auto obj = ctx && vmfp()->hasThis() ? vmfp()->getThis() : nullptr;
5164 const Func* func;
5165 auto const res = lookupClsMethod(func, cls, methName, obj, ctx, true);
5166 assertx(func);
5168 if (res == LookupResult::MethodFoundNoThis) {
5169 if (!func->isStaticInPrologue()) {
5170 throw_missing_this(func);
5172 obj = nullptr;
5173 } else {
5174 assertx(obj);
5175 assertx(res == LookupResult::MethodFoundWithThis ||
5176 res == LookupResult::MagicCallFound);
5179 auto const reifiedGenerics = getReifiedGenerics(
5180 func, methName, std::move(tsList));
5182 fcallImpl<dynamic>(origpc, pc, fca, func, reifiedGenerics, [&] (ActRec* ar) {
5183 if (obj) {
5184 obj->incRefCount();
5185 ar->setThis(obj);
5186 } else {
5187 if (forwarding && ctx) {
5188 /* Propagate the current late bound class if there is one, */
5189 /* otherwise use the class given by this instruction's input */
5190 if (vmfp()->hasThis()) {
5191 cls = vmfp()->getThis()->getVMClass();
5192 } else {
5193 cls = vmfp()->getClass();
5196 ar->setClass(cls);
5199 if (res == LookupResult::MagicCallFound) {
5200 assertx(!func->hasReifiedGenerics());
5201 ar->setMagicDispatch(methName);
5202 } else {
5203 decRefStr(const_cast<StringData*>(methName));
5208 Class* specialClsRefToCls(SpecialClsRef ref) {
5209 switch (ref) {
5210 case SpecialClsRef::Static:
5211 if (auto const cls = frameStaticClass(vmfp())) return cls;
5212 raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
5213 case SpecialClsRef::Self:
5214 if (auto const cls = arGetContextClass(vmfp())) return cls;
5215 raise_error(HPHP::Strings::CANT_ACCESS_SELF);
5216 case SpecialClsRef::Parent:
5217 if (auto const cls = arGetContextClass(vmfp())) {
5218 if (auto const parent = cls->parent()) return parent;
5219 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
5221 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
5223 always_assert(false);
5228 OPTBLD_INLINE void
5229 iopFCallClsMethod(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5230 imm_array<uint32_t> args) {
5231 auto const c1 = vmStack().topC();
5232 if (!isClassType(c1->m_type)) {
5233 raise_error("Attempting to use non-class in FCallClsMethod");
5235 auto const cls = c1->m_data.pclass;
5237 auto const c2 = vmStack().indC(1); // Method name.
5238 if (!isStringType(c2->m_type)) {
5239 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5241 auto methName = c2->m_data.pstr;
5243 if (UNLIKELY(args.size)) {
5244 String s = String::attach(methName);
5245 methName = mangleInOutName(methName, args);
5248 // fcallClsMethodImpl will take care of decReffing method name
5249 vmStack().ndiscard(2);
5250 assertx(cls && methName);
5251 fcallClsMethodImpl<true>(origpc, pc, fca, cls, methName, false, Array());
5255 namespace {
5257 ALWAYS_INLINE void
5258 fcallClsMethodDImpl(PC origpc, PC& pc, const FCallArgs& fca,
5259 Id classId, const StringData* methName, Array&& tsList) {
5260 const NamedEntityPair &nep =
5261 vmfp()->m_func->unit()->lookupNamedEntityPairId(classId);
5262 Class* cls = Unit::loadClass(nep.second, nep.first);
5263 if (cls == nullptr) {
5264 raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
5266 auto const methNameC = const_cast<StringData*>(methName);
5267 fcallClsMethodImpl<false>(origpc, pc, fca, cls, methNameC, false,
5268 std::move(tsList));
5271 } // namespace
5273 OPTBLD_INLINE void
5274 iopFCallClsMethodD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5275 Id classId, const StringData* methName) {
5276 fcallClsMethodDImpl(origpc, pc, fca, classId, methName, Array());
5279 OPTBLD_INLINE void
5280 iopFCallClsMethodRD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5281 Id classId, const StringData* methName) {
5282 assertx(tvIsVecOrVArray(vmStack().topC()));
5283 auto tsList = Array::attach(vmStack().topC()->m_data.parr);
5284 vmStack().discard();
5285 fcallClsMethodDImpl(origpc, pc, fca, classId, methName, std::move(tsList));
5288 OPTBLD_INLINE void
5289 iopFCallClsMethodS(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5290 SpecialClsRef ref, imm_array<uint32_t> args) {
5291 auto const c1 = vmStack().topC(); // Method name.
5292 if (!isStringType(c1->m_type)) {
5293 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
5295 auto const cls = specialClsRefToCls(ref);
5296 auto methName = c1->m_data.pstr;
5298 if (UNLIKELY(args.size)) {
5299 String s = String::attach(methName);
5300 methName = mangleInOutName(methName, args);
5303 // fcallClsMethodImpl will take care of decReffing name
5304 vmStack().ndiscard(1);
5305 auto const fwd = ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent;
5306 fcallClsMethodImpl<true>(origpc, pc, fca, cls, methName, fwd, Array());
5309 OPTBLD_INLINE void
5310 iopFCallClsMethodSD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5311 SpecialClsRef ref, const StringData* methName) {
5312 auto const cls = specialClsRefToCls(ref);
5313 auto const methNameC = const_cast<StringData*>(methName);
5314 auto const fwd = ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent;
5315 fcallClsMethodImpl<false>(origpc, pc, fca, cls, methNameC, fwd, Array());
5318 OPTBLD_INLINE void
5319 iopFCallClsMethodSRD(PC origpc, PC& pc, FCallArgs fca, const StringData*,
5320 SpecialClsRef ref, const StringData* methName) {
5321 assertx(tvIsVecOrVArray(vmStack().topC()));
5322 auto const cls = specialClsRefToCls(ref);
5323 auto const methNameC = const_cast<StringData*>(methName);
5324 auto const fwd = ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent;
5325 auto tsList = Array::attach(vmStack().topC()->m_data.parr);
5326 vmStack().discard();
5327 fcallClsMethodImpl<false>(origpc, pc, fca, cls, methNameC, fwd,
5328 std::move(tsList));
5331 namespace {
5333 ObjectData* newObjImpl(Class* cls, ArrayData* reified_types) {
5334 // Replace input with uninitialized instance.
5335 auto this_ = reified_types
5336 ? ObjectData::newInstanceReified<true>(cls, reified_types)
5337 : ObjectData::newInstance<true>(cls);
5338 TRACE(2, "NewObj: just new'ed an instance of class %s: %p\n",
5339 cls->name()->data(), this_);
5340 return this_;
5343 void newObjDImpl(Id id, ArrayData* reified_types) {
5344 const NamedEntityPair &nep =
5345 vmfp()->m_func->unit()->lookupNamedEntityPairId(id);
5346 auto cls = Unit::loadClass(nep.second, nep.first);
5347 if (cls == nullptr) {
5348 raise_error(Strings::UNKNOWN_CLASS,
5349 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
5351 auto this_ = newObjImpl(cls, reified_types);
5352 if (reified_types) vmStack().popC();
5353 vmStack().pushObjectNoRc(this_);
5356 } // namespace
5358 OPTBLD_INLINE void iopNewObj() {
5359 auto const clsCell = vmStack().topC();
5360 if (!isClassType(clsCell->m_type)) {
5361 raise_error("Attempting NewObj with non-class");
5363 auto const cls = clsCell->m_data.pclass;
5365 callerDynamicConstructChecks(cls);
5366 auto this_ = newObjImpl(cls, nullptr);
5367 vmStack().popC();
5368 vmStack().pushObjectNoRc(this_);
5371 OPTBLD_INLINE void iopNewObjR() {
5372 auto const reifiedCell = vmStack().topC();
5373 auto const clsCell = vmStack().indC(1);
5375 if (!isClassType(clsCell->m_type)) {
5376 raise_error("Attempting NewObjR with non-class");
5378 auto const cls = clsCell->m_data.pclass;
5380 auto const reified = [&] () -> ArrayData* {
5381 if (reifiedCell->m_type == KindOfNull) return nullptr;
5382 if (!tvIsVecOrVArray(reifiedCell)) {
5383 raise_error("Attempting NewObjR with invalid reified generics");
5385 return reifiedCell->m_data.parr;
5386 }();
5388 callerDynamicConstructChecks(cls);
5389 auto this_ = newObjImpl(cls, reified);
5390 vmStack().popC();
5391 vmStack().popC();
5392 vmStack().pushObjectNoRc(this_);
5395 OPTBLD_INLINE void iopNewObjD(Id id) {
5396 newObjDImpl(id, nullptr);
5399 OPTBLD_INLINE void iopNewObjRD(Id id) {
5400 auto const tsList = vmStack().topC();
5402 auto const reified = [&] () -> ArrayData* {
5403 if (tsList->m_type == KindOfNull) return nullptr;
5404 if (!tvIsVecOrVArray(tsList)) {
5405 raise_error("Attempting NewObjRD with invalid reified generics");
5407 return tsList->m_data.parr;
5408 }();
5409 newObjDImpl(id, reified);
5412 OPTBLD_INLINE void iopNewObjS(SpecialClsRef ref) {
5413 auto const cls = specialClsRefToCls(ref);
5414 if (ref == SpecialClsRef::Static && cls->hasReifiedGenerics()) {
5415 raise_error(Strings::NEW_STATIC_ON_REIFIED_CLASS, cls->name()->data());
5417 auto const reified_generics = cls->hasReifiedGenerics()
5418 ? getClsReifiedGenericsProp(cls, vmfp()) : nullptr;
5419 auto this_ = newObjImpl(cls, reified_generics);
5420 vmStack().pushObjectNoRc(this_);
5423 OPTBLD_INLINE void iopFCallCtor(PC origpc, PC& pc, FCallArgs fca,
5424 const StringData*) {
5425 assertx(fca.numRets == 1);
5426 assertx(fca.asyncEagerOffset == kInvalidOffset);
5427 auto const numArgs = fca.numArgsInclUnpack();
5428 assertx(tvIsObject(vmStack().indC(numArgs + 2)));
5429 auto const obj = vmStack().indC(numArgs + 2)->m_data.pobj;
5431 const Func* func;
5432 auto const ctx = arGetContextClass(vmfp());
5433 auto const res UNUSED = lookupCtorMethod(func, obj->getVMClass(), ctx, true);
5434 assertx(res == LookupResult::MethodFoundWithThis);
5436 fcallImpl<false>(origpc, pc, fca, func, nullptr, [&] (ActRec* ar) {
5437 /* Transfer ownership of obj to the ActRec*/
5438 ar->setThis(obj);
5442 OPTBLD_INLINE void iopLockObj() {
5443 auto c1 = vmStack().topC();
5444 if (!tvIsObject(*c1)) raise_error("LockObj: expected an object");
5445 c1->m_data.pobj->lockObject();
5448 namespace {
5450 // Find the AR for the current FPI region by indexing from sp
5451 inline ActRec* arFromSp(int32_t n) {
5452 return reinterpret_cast<ActRec*>(vmStack().top() + n);
5457 bool doFCallUnpackTC(PC origpc, int32_t numArgsInclUnpack, void* retAddr) {
5458 assert_native_stack_aligned();
5459 assertx(tl_regState == VMRegState::DIRTY);
5460 tl_regState = VMRegState::CLEAN;
5461 auto const ar = arFromSp(numArgsInclUnpack);
5462 assertx(ar->numArgs() == numArgsInclUnpack);
5463 ar->setReturn(vmfp(), origpc, jit::tc::ustubs().retHelper);
5464 ar->setJitReturn(retAddr);
5465 auto const ret = doFCall(ar, numArgsInclUnpack - 1, true);
5466 tl_regState = VMRegState::DIRTY;
5467 return ret;
5470 OPTBLD_FLT_INLINE
5471 void iopFCall(PC origpc, PC& pc, FCallArgs fca,
5472 const StringData* /*clsName*/, const StringData* funcName) {
5473 auto const ar = arFromSp(fca.numArgsInclUnpack());
5474 auto const func = ar->func();
5475 assertx(
5476 funcName->empty() ||
5477 RuntimeOption::EvalJitEnableRenameFunction ||
5478 (func->attrs() & AttrInterceptable) ||
5479 func->name()->isame(funcName)
5481 assertx(fca.numArgsInclUnpack() == ar->numArgs());
5482 if (fca.enforceReffiness()) callerReffinessChecks(func, fca);
5483 checkStack(vmStack(), func, 0);
5484 if (fca.numRets != 1) ar->setFCallM();
5485 auto const asyncEagerReturn =
5486 fca.asyncEagerOffset != kInvalidOffset && func->supportsAsyncEagerReturn();
5487 if (asyncEagerReturn) ar->setAsyncEagerReturn();
5488 ar->setReturn(vmfp(), origpc, jit::tc::ustubs().retHelper);
5489 doFCall(ar, fca.numArgs, fca.hasUnpack());
5490 pc = vmpc();
5493 OPTBLD_FLT_INLINE
5494 void iopFCallBuiltin(
5495 uint32_t numArgs, uint32_t numNonDefault, uint32_t numOut, Id id
5497 auto const ne = vmfp()->m_func->unit()->lookupNamedEntityId(id);
5498 auto const func = ne->uniqueFunc();
5499 if (func == nullptr || !func->isBuiltin()) {
5500 raise_error("Call to undefined function %s()",
5501 vmfp()->m_func->unit()->lookupLitstrId(id)->data());
5504 if (func->numInOutParams() != numOut) {
5505 raise_error("Call to function %s() with incorrectly annotated inout "
5506 "parameter", func->fullDisplayName()->data());
5509 callerRxChecks(vmfp(), func);
5510 assertx(!func->isMethod() || (func->isStatic() && func->cls()));
5511 auto const ctx = func->isStatic() ? func->cls() : nullptr;
5513 TypedValue* args = vmStack().indTV(numArgs-1);
5514 TypedValue ret;
5515 Native::coerceFCallArgs(args, numArgs, numNonDefault, func);
5517 if (func->hasVariadicCaptureParam()) {
5518 assertx(numArgs > 0);
5519 assertx(
5520 RuntimeOption::EvalHackArrDVArrs
5521 ? isVecType(args[1 - safe_cast<int32_t>(numArgs)].m_type)
5522 : isArrayType(args[1 - safe_cast<int32_t>(numArgs)].m_type)
5525 Native::callFunc(func, ctx, args, numNonDefault, ret, true);
5527 frame_free_args(args, numNonDefault);
5528 vmStack().ndiscard(numArgs);
5530 if (RuntimeOption::EvalArrayProvenance &&
5531 !func->isProvenanceSkipFrame()) {
5532 ret = arrprov::tagTV(ret);
5534 tvCopy(ret, *vmStack().allocTV());
5537 namespace {
5539 template <bool Local, bool Pop>
5540 bool initIterator(PC& pc, PC targetpc, Iter* it, Cell* c1) {
5541 if (isClsMethType(type(c1))) {
5542 raise_error(
5543 "Invalid operand type was used: expects iterable, clsmeth was given");
5545 auto const hasElems = it->init<Local>(c1);
5546 if (!hasElems) pc = targetpc;
5547 if (Pop) vmStack().popC();
5548 return hasElems;
5553 OPTBLD_INLINE void iopIterInit(PC& pc, Iter* it, PC targetpc, local_var val) {
5554 Cell* c1 = vmStack().topC();
5555 if (initIterator<false, true>(pc, targetpc, it, c1)) {
5556 tvAsVariant(val.ptr) = it->arr().second();
5560 OPTBLD_INLINE
5561 void iopIterInitK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5562 Cell* c1 = vmStack().topC();
5563 if (initIterator<false, true>(pc, targetpc, it, c1)) {
5564 tvAsVariant(val.ptr) = it->arr().second();
5565 tvAsVariant(key.ptr) = it->arr().first();
5569 OPTBLD_INLINE void iopLIterInit(PC& pc, Iter* it, local_var local,
5570 PC targetpc, local_var val) {
5571 if (isArrayLikeType(local.ptr->m_type)) {
5572 if (initIterator<true, false>(pc, targetpc, it, tvAssertCell(local.ptr))) {
5573 tvAsVariant(val.ptr) = it->arr().secondLocal(local.ptr->m_data.parr);
5575 return;
5578 if (initIterator<false, false>(pc, targetpc, it, tvToCell(local.ptr))) {
5579 tvAsVariant(val.ptr) = it->arr().second();
5583 OPTBLD_INLINE void iopLIterInitK(PC& pc, Iter* it, local_var local,
5584 PC targetpc, local_var val, local_var key) {
5585 if (isArrayLikeType(local.ptr->m_type)) {
5586 if (initIterator<true, false>(pc, targetpc, it, tvAssertCell(local.ptr))) {
5587 tvAsVariant(val.ptr) = it->arr().secondLocal(local.ptr->m_data.parr);
5588 tvAsVariant(key.ptr) = it->arr().firstLocal(local.ptr->m_data.parr);
5590 return;
5593 if (initIterator<false, false>(pc, targetpc, it, tvToCell(local.ptr))) {
5594 tvAsVariant(val.ptr) = it->arr().second();
5595 tvAsVariant(key.ptr) = it->arr().first();
5599 OPTBLD_INLINE void iopIterNext(PC& pc, Iter* it, PC targetpc, local_var val) {
5600 if (it->next()) {
5601 vmpc() = targetpc;
5602 jmpSurpriseCheck(targetpc - pc);
5603 pc = targetpc;
5604 tvAsVariant(val.ptr) = it->arr().second();
5608 OPTBLD_INLINE
5609 void iopIterNextK(PC& pc, Iter* it, PC targetpc, local_var val, local_var key) {
5610 if (it->next()) {
5611 vmpc() = targetpc;
5612 jmpSurpriseCheck(targetpc - pc);
5613 pc = targetpc;
5614 tvAsVariant(val.ptr) = it->arr().second();
5615 tvAsVariant(key.ptr) = it->arr().first();
5619 OPTBLD_INLINE void iopLIterNext(PC& pc,
5620 Iter* it,
5621 local_var base,
5622 PC targetpc,
5623 local_var val) {
5624 if (isArrayLikeType(base.ptr->m_type)) {
5625 if (it->nextLocal(base.ptr->m_data.parr)) {
5626 vmpc() = targetpc;
5627 jmpSurpriseCheck(targetpc - pc);
5628 pc = targetpc;
5629 tvAsVariant(val.ptr) = it->arr().secondLocal(base.ptr->m_data.parr);
5631 } else if (it->next()) {
5632 vmpc() = targetpc;
5633 jmpSurpriseCheck(targetpc - pc);
5634 pc = targetpc;
5635 tvAsVariant(val.ptr) = it->arr().second();
5639 OPTBLD_INLINE void iopLIterNextK(PC& pc,
5640 Iter* it,
5641 local_var base,
5642 PC targetpc,
5643 local_var val,
5644 local_var key) {
5645 if (isArrayLikeType(base.ptr->m_type)) {
5646 if (it->nextLocal(base.ptr->m_data.parr)) {
5647 vmpc() = targetpc;
5648 jmpSurpriseCheck(targetpc - pc);
5649 pc = targetpc;
5650 tvAsVariant(val.ptr) = it->arr().secondLocal(base.ptr->m_data.parr);
5651 tvAsVariant(key.ptr) = it->arr().firstLocal(base.ptr->m_data.parr);
5653 } else if (it->next()) {
5654 vmpc() = targetpc;
5655 jmpSurpriseCheck(targetpc - pc);
5656 pc = targetpc;
5657 tvAsVariant(val.ptr) = it->arr().second();
5658 tvAsVariant(key.ptr) = it->arr().first();
5662 OPTBLD_INLINE void iopIterFree(Iter* it) {
5663 it->free();
5666 OPTBLD_INLINE void iopLIterFree(Iter* it, local_var) {
5667 it->free();
5670 OPTBLD_INLINE void inclOp(PC origpc, PC& pc, InclOpFlags flags,
5671 const char* opName) {
5672 Cell* c1 = vmStack().topC();
5673 auto path = String::attach(prepareKey(*c1));
5674 bool initial;
5675 TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
5676 flags & InclOpFlags::Once ? "Once" : "",
5677 flags & InclOpFlags::DocRoot ? "DocRoot" : "",
5678 flags & InclOpFlags::Relative ? "Relative" : "",
5679 flags & InclOpFlags::Fatal ? "Fatal" : "",
5680 path.data());
5682 auto curUnitFilePath = [&] {
5683 namespace fs = boost::filesystem;
5684 fs::path currentUnit(vmfp()->m_func->unit()->filepath()->data());
5685 fs::path currentDir(currentUnit.branch_path());
5686 return currentDir.string();
5689 auto const unit = [&] {
5690 if (flags & InclOpFlags::Relative) {
5691 String absPath = curUnitFilePath() + '/';
5692 absPath += path;
5693 return lookupUnit(absPath.get(), "", &initial,
5694 Native::s_noNativeFuncs, false);
5696 if (flags & InclOpFlags::DocRoot) {
5697 return lookupUnit(
5698 SourceRootInfo::RelativeToPhpRoot(path).get(), "", &initial,
5699 Native::s_noNativeFuncs, false);
5701 return lookupUnit(path.get(), curUnitFilePath().c_str(), &initial,
5702 Native::s_noNativeFuncs, false);
5703 }();
5705 vmStack().popC();
5706 if (unit == nullptr) {
5707 if (flags & InclOpFlags::Fatal) {
5708 raise_error("%s(%s): File not found", opName, path.data());
5709 } else {
5710 raise_warning("%s(%s): File not found", opName, path.data());
5712 vmStack().pushBool(false);
5713 return;
5716 if (!(flags & InclOpFlags::Once) || initial) {
5717 g_context->evalUnit(unit, origpc, pc, EventHook::PseudoMain);
5718 } else {
5719 Stats::inc(Stats::PseudoMain_Guarded);
5720 vmStack().pushBool(true);
5724 OPTBLD_INLINE void iopIncl(PC origpc, PC& pc) {
5725 inclOp(origpc, pc, InclOpFlags::Default, "include");
5728 OPTBLD_INLINE void iopInclOnce(PC origpc, PC& pc) {
5729 inclOp(origpc, pc, InclOpFlags::Once, "include_once");
5732 OPTBLD_INLINE void iopReq(PC origpc, PC& pc) {
5733 inclOp(origpc, pc, InclOpFlags::Fatal, "require");
5736 OPTBLD_INLINE void iopReqOnce(PC origpc, PC& pc) {
5737 inclOp(origpc, pc, InclOpFlags::Fatal | InclOpFlags::Once, "require_once");
5740 OPTBLD_INLINE void iopReqDoc(PC origpc, PC& pc) {
5741 inclOp(
5742 origpc,
5744 InclOpFlags::Fatal | InclOpFlags::Once | InclOpFlags::DocRoot,
5745 "require_once"
5749 OPTBLD_INLINE void iopEval(PC origpc, PC& pc) {
5750 Cell* c1 = vmStack().topC();
5752 if (UNLIKELY(RuntimeOption::EvalAuthoritativeMode)) {
5753 // Ahead of time whole program optimizations need to assume it can
5754 // see all the code, or it really can't do much.
5755 raise_error("You can't use eval in RepoAuthoritative mode");
5758 auto code = String::attach(prepareKey(*c1));
5759 String prefixedCode = concat(
5760 vmfp()->unit()->isHHFile() ? "<?hh " : "<?php ",
5761 code
5764 auto evalFilename = std::string();
5765 auto vm = &*g_context;
5766 string_printf(
5767 evalFilename,
5768 "%s(%d)(%s" EVAL_FILENAME_SUFFIX,
5769 vm->getContainingFileName()->data(),
5770 vm->getLine(),
5771 string_md5(code.slice()).c_str()
5773 Unit* unit = vm->compileEvalString(prefixedCode.get(), evalFilename.c_str());
5774 if (!RuntimeOption::EvalJitEvaledCode) {
5775 unit->setInterpretOnly();
5777 const StringData* msg;
5778 int line = 0;
5780 vmStack().popC();
5781 if (unit->parseFatal(msg, line)) {
5782 auto const errnum = static_cast<int>(ErrorMode::WARNING);
5783 if (vm->errorNeedsLogging(errnum)) {
5784 // manual call to Logger instead of logError as we need to use
5785 // evalFileName and line as the exception doesn't track the eval()
5786 Logger::Error(
5787 "\nFatal error: %s in %s on line %d",
5788 msg->data(),
5789 evalFilename.c_str(),
5790 line
5794 vmStack().pushBool(false);
5795 return;
5797 vm->evalUnit(unit, origpc, pc, EventHook::Eval);
5800 OPTBLD_INLINE void iopDefCls(uint32_t cid) {
5801 PreClass* c = vmfp()->m_func->unit()->lookupPreClassId(cid);
5802 Unit::defClass(c);
5805 OPTBLD_INLINE void iopDefRecord(uint32_t cid) {
5806 auto const r = vmfp()->m_func->unit()->lookupPreRecordId(cid);
5807 r->checkFieldDefaultValues();
5808 Unit::defRecordDesc(r);
5811 OPTBLD_INLINE void iopAliasCls(const StringData* original,
5812 const StringData* alias) {
5813 TypedValue* aloadTV = vmStack().topTV();
5814 tvCastToBooleanInPlace(aloadTV);
5815 assertx(aloadTV->m_type == KindOfBoolean);
5816 bool autoload = aloadTV->m_data.num;
5817 vmStack().popX();
5819 vmStack().pushBool(Unit::aliasClass(original, alias, autoload));
5822 OPTBLD_INLINE void iopDefClsNop(uint32_t /*cid*/) {}
5824 OPTBLD_INLINE void iopDefTypeAlias(uint32_t tid) {
5825 vmfp()->func()->unit()->defTypeAlias(tid);
5828 OPTBLD_INLINE void iopThis() {
5829 checkThis(vmfp());
5830 ObjectData* this_ = vmfp()->getThis();
5831 vmStack().pushObject(this_);
5834 OPTBLD_INLINE void iopBareThis(BareThisOp bto) {
5835 if (vmfp()->func()->cls() && vmfp()->hasThis()) {
5836 ObjectData* this_ = vmfp()->getThis();
5837 vmStack().pushObject(this_);
5838 } else {
5839 vmStack().pushNull();
5840 switch (bto) {
5841 case BareThisOp::Notice: raise_notice(Strings::WARN_NULL_THIS); break;
5842 case BareThisOp::NoNotice: break;
5843 case BareThisOp::NeverNull:
5844 assertx(!"$this cannot be null in BareThis with NeverNull option");
5845 break;
5850 OPTBLD_INLINE void iopCheckThis() {
5851 checkThis(vmfp());
5854 OPTBLD_INLINE void iopInitThisLoc(local_var thisLoc) {
5855 tvDecRefGen(thisLoc.ptr);
5856 if (vmfp()->func()->cls() && vmfp()->hasThis()) {
5857 thisLoc->m_data.pobj = vmfp()->getThis();
5858 thisLoc->m_type = KindOfObject;
5859 tvIncRefCountable(*thisLoc.ptr);
5860 } else {
5861 tvWriteUninit(*thisLoc.ptr);
5865 OPTBLD_INLINE void iopChainFaults() {
5866 auto const current = *vmStack().indC(1);
5867 auto const prev = *vmStack().indC(0);
5868 if (!isObjectType(current.m_type) ||
5869 !current.m_data.pobj->instanceof(SystemLib::s_ThrowableClass) ||
5870 !isObjectType(prev.m_type) ||
5871 !prev.m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
5872 raise_error(
5873 "Inputs to ChainFault must be objects that implement Throwable"
5877 // chainFaultObjects takes ownership of a reference to prev.
5878 vmStack().discard();
5879 chainFaultObjects(current.m_data.pobj, prev.m_data.pobj);
5882 OPTBLD_INLINE void iopLateBoundCls() {
5883 auto const cls = frameStaticClass(vmfp());
5884 if (!cls) raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
5885 vmStack().pushClass(cls);
5888 OPTBLD_INLINE void iopVerifyParamType(local_var param) {
5889 const Func *func = vmfp()->m_func;
5890 assertx(param.index < func->numParams());
5891 assertx(func->numParams() == int(func->params().size()));
5892 const TypeConstraint& tc = func->params()[param.index].typeConstraint;
5893 if (tc.isCheckable()) tc.verifyParam(param.ptr, func, param.index);
5896 OPTBLD_INLINE void iopVerifyParamTypeTS(local_var param) {
5897 iopVerifyParamType(param);
5898 auto const cell = vmStack().topC();
5899 assertx(tvIsDictOrDArray(cell));
5900 auto isTypeVar = tcCouldBeReified(vmfp()->m_func, param.index);
5901 bool warn = false;
5902 if ((isTypeVar || tvIsObject(param.ptr)) &&
5903 !verifyReifiedLocalType(cell->m_data.parr, param.ptr, isTypeVar, warn)) {
5904 raise_reified_typehint_error(
5905 folly::sformat(
5906 "Argument {} passed to {}() must be an instance of {}, {} given",
5907 param.index + 1,
5908 vmfp()->m_func->fullName()->data(),
5909 TypeStructure::toStringForDisplay(ArrNR(cell->m_data.parr)).c_str(),
5910 describe_actual_type(param.ptr, true)
5911 ), warn
5914 vmStack().popC();
5917 OPTBLD_INLINE void iopVerifyOutType(uint32_t paramId) {
5918 auto const func = vmfp()->m_func;
5919 assertx(paramId < func->numParams());
5920 assertx(func->numParams() == int(func->params().size()));
5921 auto const& tc = func->params()[paramId].typeConstraint;
5922 if (tc.isCheckable()) tc.verifyOutParam(vmStack().topTV(), func, paramId);
5925 namespace {
5927 OPTBLD_INLINE void verifyRetTypeImpl(size_t ind) {
5928 const auto func = vmfp()->m_func;
5929 const auto tc = func->returnTypeConstraint();
5930 if (tc.isCheckable()) tc.verifyReturn(vmStack().indC(ind), func);
5933 } // namespace
5935 OPTBLD_INLINE void iopVerifyRetTypeC() {
5936 if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) return;
5937 verifyRetTypeImpl(0); // Cell is on the top of the stack
5940 OPTBLD_INLINE void iopVerifyRetTypeTS() {
5941 if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) {
5942 vmStack().popC();
5943 return;
5945 verifyRetTypeImpl(1); // Cell is the second element on the stack
5946 auto const ts = vmStack().topC();
5947 assertx(tvIsDictOrDArray(ts));
5948 auto const cell = vmStack().indC(1);
5949 bool isTypeVar = tcCouldBeReified(vmfp()->m_func, TypeConstraint::ReturnId);
5950 bool warn = false;
5951 if ((isTypeVar || tvIsObject(cell)) &&
5952 !verifyReifiedLocalType(ts->m_data.parr, cell, isTypeVar, warn)) {
5953 raise_reified_typehint_error(
5954 folly::sformat(
5955 "Value returned from function {}() must be of type {}, {} given",
5956 vmfp()->m_func->fullName()->data(),
5957 TypeStructure::toStringForDisplay(ArrNR(ts->m_data.parr)).c_str(),
5958 describe_actual_type(cell, true)
5959 ), warn
5962 vmStack().popC();
5965 OPTBLD_INLINE void iopVerifyRetNonNullC() {
5966 if (UNLIKELY(!RuntimeOption::EvalCheckReturnTypeHints)) return;
5967 const auto func = vmfp()->m_func;
5968 const auto tc = func->returnTypeConstraint();
5969 tc.verifyReturnNonNull(vmStack().topC(), func);
5972 OPTBLD_INLINE TCA iopNativeImpl(PC& pc) {
5973 auto const jitReturn = jitReturnPre(vmfp());
5974 auto const func = vmfp()->func();
5975 auto const native = func->arFuncPtr();
5976 assertx(native != nullptr);
5977 // Actually call the native implementation. This will handle freeing the
5978 // locals in the normal case. In the case of an exception, the VM unwinder
5979 // will take care of it.
5980 native(vmfp());
5982 // Grab caller info from ActRec.
5983 ActRec* sfp = vmfp()->sfp();
5984 Offset callOff = vmfp()->m_callOff;
5986 // Adjust the stack; the native implementation put the return value in the
5987 // right place for us already
5988 vmStack().ndiscard(func->numSlotsInFrame());
5989 vmStack().ret();
5991 auto const retval = vmStack().topTV();
5993 // Return control to the caller.
5994 returnToCaller(pc, sfp, callOff);
5996 if (RuntimeOption::EvalArrayProvenance &&
5997 !func->isProvenanceSkipFrame()) {
5998 auto const origPC = vmpc();
5999 SCOPE_EXIT { vmpc() = origPC; };
6001 vmpc() = pc;
6002 *retval = arrprov::tagTV(*retval);
6004 return jitReturnPost(jitReturn);
6007 OPTBLD_INLINE void iopSelf() {
6008 auto const clss = arGetContextClass(vmfp());
6009 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_SELF);
6010 vmStack().pushClass(clss);
6013 OPTBLD_INLINE void iopParent() {
6014 auto const clss = arGetContextClass(vmfp());
6015 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
6016 auto const parent = clss->parent();
6017 if (!parent) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
6018 vmStack().pushClass(parent);
6021 OPTBLD_INLINE void iopCreateCl(uint32_t numArgs, uint32_t clsIx) {
6022 auto const func = vmfp()->m_func;
6023 auto const preCls = func->unit()->lookupPreClassId(clsIx);
6024 auto const c = Unit::defClosure(preCls);
6026 auto const cls = c->rescope(const_cast<Class*>(func->cls()));
6027 assertx(!cls->needInitialization());
6028 auto obj = RuntimeOption::RepoAuthoritative
6029 ? createClosureRepoAuth(cls) : createClosure(cls);
6030 c_Closure::fromObject(obj)->init(numArgs, vmfp(), vmStack().top());
6031 vmStack().ndiscard(numArgs);
6032 vmStack().pushObjectNoRc(obj);
6035 static inline BaseGenerator* this_base_generator(const ActRec* fp) {
6036 auto const obj = fp->getThis();
6037 assertx(obj->getVMClass() == AsyncGenerator::getClass() ||
6038 obj->getVMClass() == Generator::getClass());
6039 return obj->getVMClass() == Generator::getClass()
6040 ? static_cast<BaseGenerator*>(Generator::fromObject(obj))
6041 : static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj));
6044 static inline Generator* this_generator(const ActRec* fp) {
6045 auto const obj = fp->getThis();
6046 return Generator::fromObject(obj);
6049 const StaticString s_this("this");
6051 OPTBLD_INLINE TCA iopCreateCont(PC& pc) {
6052 auto const jitReturn = jitReturnPre(vmfp());
6054 auto const fp = vmfp();
6055 auto const func = fp->func();
6056 auto const numSlots = func->numSlotsInFrame();
6057 auto const resumeOffset = func->unit()->offsetOf(pc);
6058 assertx(!fp->resumed());
6059 assertx(func->isGenerator());
6061 // Create the {Async,}Generator object. Create takes care of copying local
6062 // variables and iterators.
6063 auto const obj = func->isAsync()
6064 ? AsyncGenerator::Create(fp, numSlots, nullptr, resumeOffset)
6065 : Generator::Create(fp, numSlots, nullptr, resumeOffset);
6067 auto const genData = func->isAsync() ?
6068 static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj)) :
6069 static_cast<BaseGenerator*>(Generator::fromObject(obj));
6071 EventHook::FunctionSuspendCreateCont(fp, genData->actRec());
6073 // Grab caller info from ActRec.
6074 ActRec* sfp = fp->sfp();
6075 Offset callOff = fp->m_callOff;
6077 // Free ActRec and store the return value.
6078 vmStack().ndiscard(numSlots);
6079 vmStack().ret();
6080 tvCopy(make_tv<KindOfObject>(obj), *vmStack().topTV());
6081 assertx(vmStack().topTV() == fp->retSlot());
6083 // Return control to the caller.
6084 returnToCaller(pc, sfp, callOff);
6086 return jitReturnPost(jitReturn);
6089 OPTBLD_INLINE void movePCIntoGenerator(PC origpc, BaseGenerator* gen) {
6090 assertx(gen->isRunning());
6091 ActRec* genAR = gen->actRec();
6092 genAR->setReturn(vmfp(), origpc, genAR->func()->isAsync() ?
6093 jit::tc::ustubs().asyncGenRetHelper :
6094 jit::tc::ustubs().genRetHelper);
6096 vmfp() = genAR;
6098 assertx(genAR->func()->contains(gen->resumable()->resumeOffset()));
6099 vmpc() = genAR->func()->unit()->at(gen->resumable()->resumeOffset());
6102 OPTBLD_INLINE bool tvIsGenerator(TypedValue tv) {
6103 return tv.m_type == KindOfObject &&
6104 tv.m_data.pobj->instanceof(Generator::getClass());
6107 template<bool recursive>
6108 OPTBLD_INLINE void contEnterImpl(PC origpc) {
6110 // The stack must have one cell! Or else resumableStackBase() won't work!
6111 assertx(vmStack().top() + 1 ==
6112 (TypedValue*)vmfp() - vmfp()->m_func->numSlotsInFrame());
6114 // Do linkage of the generator's AR.
6115 assertx(vmfp()->hasThis());
6116 // `recursive` determines whether we enter just the top generator or whether
6117 // we drop down to the lowest running delegate generator. This is useful for
6118 // ContRaise, which should throw from the context of the lowest generator.
6119 if(!recursive || vmfp()->getThis()->getVMClass() != Generator::getClass()) {
6120 movePCIntoGenerator(origpc, this_base_generator(vmfp()));
6121 } else {
6122 // TODO(https://github.com/facebook/hhvm/issues/6040)
6123 // Implement throwing from delegate generators.
6124 assertx(vmfp()->getThis()->getVMClass() == Generator::getClass());
6125 auto gen = this_generator(vmfp());
6126 if (gen->m_delegate.m_type != KindOfNull) {
6127 SystemLib::throwExceptionObject("Throwing from a delegate generator is "
6128 "not currently supported in HHVM");
6130 movePCIntoGenerator(origpc, gen);
6133 EventHook::FunctionResumeYield(vmfp());
6136 OPTBLD_INLINE void iopContEnter(PC origpc, PC& pc) {
6137 contEnterImpl<false>(origpc);
6138 pc = vmpc();
6141 OPTBLD_INLINE void iopContRaise(PC origpc, PC& pc) {
6142 contEnterImpl<true>(origpc);
6143 pc = vmpc();
6144 iopThrow(pc);
6147 OPTBLD_INLINE TCA yield(PC& pc, const Cell* key, const Cell value) {
6148 auto const jitReturn = jitReturnPre(vmfp());
6150 auto const fp = vmfp();
6151 auto const func = fp->func();
6152 auto const resumeOffset = func->unit()->offsetOf(pc);
6153 assertx(fp->resumed());
6154 assertx(func->isGenerator());
6156 EventHook::FunctionSuspendYield(fp);
6158 auto const sfp = fp->sfp();
6159 auto const callOff = fp->m_callOff;
6161 if (!func->isAsync()) {
6162 // Non-async generator.
6163 assertx(fp->sfp());
6164 frame_generator(fp)->yield(resumeOffset, key, value);
6166 // Push return value of next()/send()/raise().
6167 vmStack().pushNull();
6168 } else {
6169 // Async generator.
6170 auto const gen = frame_async_generator(fp);
6171 auto const eagerResult = gen->yield(resumeOffset, key, value);
6172 if (eagerResult) {
6173 // Eager execution => return StaticWaitHandle.
6174 assertx(sfp);
6175 vmStack().pushObjectNoRc(eagerResult);
6176 } else {
6177 // Resumed execution => return control to the scheduler.
6178 assertx(!sfp);
6182 returnToCaller(pc, sfp, callOff);
6184 return jitReturnPost(jitReturn);
6187 OPTBLD_INLINE TCA iopYield(PC& pc) {
6188 auto const value = *vmStack().topC();
6189 vmStack().discard();
6190 return yield(pc, nullptr, value);
6193 OPTBLD_INLINE TCA iopYieldK(PC& pc) {
6194 auto const key = *vmStack().indC(1);
6195 auto const value = *vmStack().topC();
6196 vmStack().ndiscard(2);
6197 return yield(pc, &key, value);
6200 OPTBLD_INLINE bool typeIsValidGeneratorDelegate(DataType type) {
6201 return type == KindOfArray ||
6202 type == KindOfPersistentArray ||
6203 type == KindOfObject;
6206 OPTBLD_INLINE void iopContAssignDelegate(Iter* iter) {
6207 auto param = *vmStack().topC();
6208 vmStack().discard();
6209 auto gen = frame_generator(vmfp());
6210 if (UNLIKELY(!typeIsValidGeneratorDelegate(param.m_type))) {
6211 tvDecRefGen(param);
6212 SystemLib::throwErrorObject(
6213 "Can use \"yield from\" only with arrays and Traversables"
6217 // We don't use the iterator if we have a delegate generator (as iterators
6218 // mess with the internal state of the generator), so short circuit and dont
6219 // init our iterator in that case. Otherwise, if we init our iterator and it
6220 // returns false then we know that we have an empty iterator (like `[]`) in
6221 // which case just set our delegate to Null so that ContEnterDelegate and
6222 // YieldFromDelegate know something is up.
6223 if (tvIsGenerator(param) || iter->init<false>(&param)) {
6224 cellSet(param, gen->m_delegate);
6225 } else {
6226 cellSetNull(gen->m_delegate);
6228 // When using a subgenerator we don't actually read the values of the m_key
6229 // and m_value of our frame generator (the delegating generator). The
6230 // generator itself is still holding a reference to them though, so null
6231 // out the key/value to free the memory.
6232 cellSetNull(gen->m_key);
6233 cellSetNull(gen->m_value);
// Resume the generator delegate of the current `yield from`, if there is one;
// iterator delegates are advanced by YieldFromDelegate instead.
OPTBLD_INLINE void iopContEnterDelegate(PC origpc, PC& pc) {
  // Make sure we have a delegate
  auto gen = frame_generator(vmfp());

  // Ignore the VM Stack, we want to pass that down from ContEnter

  // ContEnterDelegate doesn't do anything for iterators.
  if (!tvIsGenerator(gen->m_delegate)) {
    return;
  }

  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If our generator finished earlier (or if there was nothing to do) just
    // continue on and let YieldFromDelegate handle cleaning up.
    return;
  }

  // A pretty odd if statement, but consider the following situation.
  // Generators A and B both do `yield from` on a shared delegate generator,
  // C. When A is first used we autoprime it, and therefore also autoprime C as
  // well. Then we also autoprime B when it gets used, which advances C past
  // some perfectly valid data.
  // Basically this check is to make sure that we autoprime delegate generators
  // when needed, and not if they're shared.
  if (gen->getState() == BaseGenerator::State::Priming &&
      delegate->getState() != BaseGenerator::State::Created) {
    return;
  }

  // We're about to resume executing our generator, so make sure we're in the
  // right state.
  delegate->preNext(false);

  // Transfer the PC into the delegate, fire the resume hook, and continue
  // interpreting inside the delegate's frame.
  movePCIntoGenerator(origpc, delegate);
  EventHook::FunctionResumeYield(vmfp());
  pc = vmpc();
}
// Suspend a generator that is delegating (`yield from`) to another generator.
// Returns a TC address to resume at, or nullptr to continue interpreting.
OPTBLD_INLINE
TCA yieldFromGenerator(PC& pc, Generator* gen, Offset resumeOffset) {
  auto fp = vmfp();

  assertx(tvIsGenerator(gen->m_delegate));
  auto delegate = Generator::fromObject(gen->m_delegate.m_data.pobj);

  if (delegate->getState() == BaseGenerator::State::Done) {
    // If the generator is done, just copy the return value onto the stack.
    cellDup(delegate->m_value, *vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  // Fire the suspend hook, then capture the caller frame info before the
  // ActRec is left behind.
  EventHook::FunctionSuspendYield(fp);
  auto const sfp = fp->sfp();
  auto const callOff = fp->m_callOff;

  // We don't actually want to "yield" anything here. The implementation of
  // key/current are smart enough to dive into our delegate generator, so
  // really what we want to do is clean up all of the generator metadata
  // (state, resume address, etc) and continue on.
  assertx(gen->isRunning());
  gen->resumable()->setResumeAddr(nullptr, resumeOffset);
  gen->setState(BaseGenerator::State::Started);

  returnToCaller(pc, sfp, callOff);

  return jitReturnPost(jitReturn);
}
// Suspend a generator that is delegating (`yield from`) to a plain iterator,
// yielding the iterator's current key/value. Pushes null and keeps running
// when there is nothing left to yield.
OPTBLD_INLINE
TCA yieldFromIterator(PC& pc, Generator* gen, Iter* it, Offset resumeOffset) {
  auto fp = vmfp();

  // For the most part this should never happen, the emitter assigns our
  // delegate to a non-null value in ContAssignDelegate. The one exception to
  // this is if we are given an empty iterator, in which case
  // ContAssignDelegate will remove our delegate and just send us to
  // YieldFromDelegate to return our null.
  if (UNLIKELY(gen->m_delegate.m_type == KindOfNull)) {
    tvWriteNull(*vmStack().topTV());
    return nullptr;
  }

  // Otherwise, if iteration is finished we just return null.
  auto arr = it->arr();
  if (arr.end()) {
    // Push our null return value onto the stack
    tvWriteNull(*vmStack().topTV());
    return nullptr;
  }

  auto jitReturn = jitReturnPre(fp);

  // Fire the suspend hook, then capture caller info before suspending.
  EventHook::FunctionSuspendYield(fp);
  auto const sfp = fp->sfp();
  auto const callOff = fp->m_callOff;

  // Record the current element as this generator's key/value and suspend.
  auto key = *(arr.first().asTypedValue());
  auto value = *(arr.second().asTypedValue());
  gen->yield(resumeOffset, &key, value);

  returnToCaller(pc, sfp, callOff);

  // Advance past the element we just yielded.
  it->next();

  return jitReturnPost(jitReturn);
}
6347 OPTBLD_INLINE TCA iopYieldFromDelegate(PC& pc, Iter* it, PC resumePc) {
6348 auto gen = frame_generator(vmfp());
6349 auto func = vmfp()->func();
6350 auto resumeOffset = func->unit()->offsetOf(resumePc);
6351 if (tvIsGenerator(gen->m_delegate)) {
6352 return yieldFromGenerator(pc, gen, resumeOffset);
6354 return yieldFromIterator(pc, gen, it, resumeOffset);
6357 OPTBLD_INLINE void iopContUnsetDelegate(CudOp subop, Iter* iter) {
6358 auto gen = frame_generator(vmfp());
6359 // The `shouldFreeIter` immediate determines whether we need to call free
6360 // on our iterator or not. Normally if we finish executing our yield from
6361 // successfully then the implementation of `next` will automatically do it
6362 // for us when there aren't any elements left, but if an exception is thrown
6363 // then we need to do it manually. We don't use the iterator when the
6364 // delegate is a generator though, so even if the param tells us to free it
6365 // we should just ignore it.
6366 if (UNLIKELY(subop == CudOp::FreeIter && !tvIsGenerator(gen->m_delegate))) {
6367 iter->free();
6369 cellSetNull(gen->m_delegate);
6372 OPTBLD_INLINE void iopContCheck(ContCheckOp subop) {
6373 this_base_generator(vmfp())->preNext(subop == ContCheckOp::CheckStarted);
6376 OPTBLD_INLINE void iopContValid() {
6377 vmStack().pushBool(
6378 this_generator(vmfp())->getState() != BaseGenerator::State::Done);
6381 OPTBLD_INLINE Generator *currentlyDelegatedGenerator(Generator *gen) {
6382 while(tvIsGenerator(gen->m_delegate)) {
6383 gen = Generator::fromObject(gen->m_delegate.m_data.pobj);
6385 return gen;
6388 OPTBLD_INLINE void iopContKey() {
6389 Generator* cont = this_generator(vmfp());
6390 cont->startedCheck();
6392 // If we are currently delegating to a generator, return its key instead
6393 cont = currentlyDelegatedGenerator(cont);
6395 cellDup(cont->m_key, *vmStack().allocC());
6398 OPTBLD_INLINE void iopContCurrent() {
6399 Generator* cont = this_generator(vmfp());
6400 cont->startedCheck();
6402 // If we are currently delegating to a generator, return its value instead
6403 cont = currentlyDelegatedGenerator(cont);
6405 if(cont->getState() == BaseGenerator::State::Done) {
6406 vmStack().pushNull();
6407 } else {
6408 cellDup(cont->m_value, *vmStack().allocC());
6412 OPTBLD_INLINE void iopContGetReturn() {
6413 Generator* cont = this_generator(vmfp());
6414 cont->startedCheck();
6416 if(!cont->successfullyFinishedExecuting()) {
6417 SystemLib::throwExceptionObject("Cannot get return value of a generator "
6418 "that hasn't returned");
6421 cellDup(cont->m_value, *vmStack().allocC());
// Suspend an eagerly-executing async frame that hit a blocking await: pack
// the frame into a wait handle, hand that to the caller, and return control.
OPTBLD_INLINE void asyncSuspendE(PC& pc) {
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(func->isAsync());
  assertx(resumeModeFromActRec(fp) != ResumeMode::Async);

  // Pop the dependency we are blocked on.
  auto child = wait_handle<c_WaitableWaitHandle>(*vmStack().topC());
  assertx(!child->isFinished());
  vmStack().discard();

  if (!func->isGenerator()) {  // Async function.
    // Create the AsyncFunctionWaitHandle object. Create takes care of
    // copying local variables and iterators.
    auto waitHandle = c_AsyncFunctionWaitHandle::Create<true>(
      fp, func->numSlotsInFrame(), nullptr, resumeOffset, child);

    // Call the suspend hook. It will decref the newly allocated waitHandle
    // if it throws.
    EventHook::FunctionSuspendAwaitEF(fp, waitHandle->actRec());

    // Grab caller info from ActRec.
    ActRec* sfp = fp->sfp();
    Offset callOff = fp->m_callOff;

    // Free ActRec and store the return value. In case async eager return was
    // requested by the caller, let it know that we did not finish eagerly.
    vmStack().ndiscard(func->numSlotsInFrame());
    vmStack().ret();
    tvCopy(make_tv<KindOfObject>(waitHandle), *vmStack().topTV());
    vmStack().topTV()->m_aux.u_asyncNonEagerReturnFlag = -1;
    assertx(vmStack().topTV() == fp->retSlot());

    // Return control to the caller.
    returnToCaller(pc, sfp, callOff);
  } else {  // Async generator.
    // Create new AsyncGeneratorWaitHandle.
    auto waitHandle = c_AsyncGeneratorWaitHandle::Create(
      fp, nullptr, resumeOffset, child);

    // Call the suspend hook. It will decref the newly allocated waitHandle
    // if it throws.
    EventHook::FunctionSuspendAwaitEG(fp);

    // Store the return value.
    vmStack().pushObjectNoRc(waitHandle);

    // Return control to the caller (AG::next()).
    assertx(fp->sfp());
    returnToCaller(pc, fp->sfp(), fp->m_callOff);
  }
}
// Suspend an already-resumed async frame that hit a blocking await: register
// the dependency on its existing wait handle and return to the scheduler.
OPTBLD_INLINE void asyncSuspendR(PC& pc) {
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const resumeOffset = func->unit()->offsetOf(pc);
  assertx(!fp->sfp());
  assertx(func->isAsync());
  assertx(resumeModeFromActRec(fp) == ResumeMode::Async);

  // Pop the dependency we are blocked on.
  auto child = req::ptr<c_WaitableWaitHandle>::attach(
    wait_handle<c_WaitableWaitHandle>(*vmStack().topC()));
  assertx(!child->isFinished());
  vmStack().discard();

  // Before adjusting the stack or doing anything, check the suspend hook.
  // This can throw.
  EventHook::FunctionSuspendAwaitR(fp, child.get());

  // Await child and suspend the async function/generator. May throw.
  if (!func->isGenerator()) {  // Async function.
    frame_afwh(fp)->await(resumeOffset, std::move(child));
  } else {  // Async generator.
    auto const gen = frame_async_generator(fp);
    gen->resumable()->setResumeAddr(nullptr, resumeOffset);
    gen->getWaitHandle()->await(std::move(child));
  }

  // Return control to the scheduler.
  pc = nullptr;
  vmfp() = nullptr;
}
6510 namespace {
6512 TCA suspendStack(PC &pc) {
6513 auto const jitReturn = jitReturnPre(vmfp());
6514 if (resumeModeFromActRec(vmfp()) == ResumeMode::Async) {
6515 // suspend resumed execution
6516 asyncSuspendR(pc);
6517 } else {
6518 // suspend eager execution
6519 asyncSuspendE(pc);
6521 return jitReturnPost(jitReturn);
6526 OPTBLD_INLINE TCA iopAwait(PC& pc) {
6527 auto const awaitable = vmStack().topC();
6528 auto wh = c_Awaitable::fromCell(*awaitable);
6529 if (UNLIKELY(wh == nullptr)) {
6530 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
6532 if (LIKELY(wh->isFailed())) {
6533 throw req::root<Object>{wh->getException()};
6535 if (wh->isSucceeded()) {
6536 cellSet(wh->getResult(), *vmStack().topC());
6537 return nullptr;
6539 return suspendStack(pc);
// Await a contiguous range of locals at once. Counts the unfinished
// awaitables; if none remain pending, pushes null and keeps running,
// otherwise builds an AwaitAllWaitHandle over the range and suspends.
OPTBLD_INLINE TCA iopAwaitAll(PC& pc, LocalRange locals) {
  uint32_t cnt = 0;
  for (auto i = locals.first; i < locals.first + locals.count; ++i) {
    auto const local = *frame_local(vmfp(), i);
    if (cellIsNull(local)) continue;  // null slots are skipped, not errors
    auto const awaitable = c_Awaitable::fromCell(local);
    if (UNLIKELY(awaitable == nullptr)) {
      SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
    }
    if (!awaitable->isFinished()) {
      ++cnt;
    }
  }

  // Everything already finished: nothing to wait for.
  if (!cnt) {
    vmStack().pushNull();
    return nullptr;
  }

  auto obj = Object::attach(c_AwaitAllWaitHandle::fromFrameNoCheck(
    locals.count, cnt, frame_local(vmfp(), locals.first)
  ));
  assertx(obj->isWaitHandle());
  assertx(!static_cast<c_Awaitable*>(obj.get())->isFinished());

  vmStack().pushObjectNoRc(obj.detach());
  return suspendStack(pc);
}
// Fetch the result of a finished wait handle without awaiting: rethrows on
// failure, replaces the handle with its result on success, and throws if the
// handle is still pending.
OPTBLD_INLINE void iopWHResult() {
  // we should never emit this bytecode for non-waithandle
  auto const wh = c_Awaitable::fromCell(*vmStack().topC());
  if (UNLIKELY(!wh)) {
    raise_error("WHResult input was not a subclass of Awaitable");
  }

  // the failure condition is likely since we punt to this opcode
  // in the JIT when the state is failed.
  if (wh->isFailed()) {
    throw_object(Object{wh->getException()});
  }
  if (wh->isSucceeded()) {
    cellSet(wh->getResult(), *vmStack().topC());
    return;
  }
  SystemLib::throwInvalidOperationExceptionObject(
    "Request for result on pending wait handle, "
    "must await or join() before calling result()");
  not_reached();
}
// Push whether the declared property named `propName` has already been
// initialized (i.e. is no longer KindOfUninit) in the class's property data.
OPTBLD_INLINE void iopCheckProp(const StringData* propName) {
  auto* cls = vmfp()->getClass();
  auto* propVec = cls->getPropData();
  always_assert(propVec);

  // Look the property up in the context class's declared-property table.
  auto* ctx = arGetContextClass(vmfp());
  auto idx = ctx->lookupDeclProp(propName);

  auto& tv = (*propVec)[idx];
  vmStack().pushBool(tv.m_type != KindOfUninit);
}
// Initialize a static or instance property from the value on top of the
// stack, verifying the property's type hint when hint checking is enabled.
OPTBLD_INLINE void iopInitProp(const StringData* propName, InitPropOp propOp) {
  auto* cls = vmfp()->getClass();
  TypedValue* tv;

  auto* ctx = arGetContextClass(vmfp());
  auto* fr = vmStack().topC();

  switch (propOp) {
    case InitPropOp::Static: {
      auto const slot = ctx->lookupSProp(propName);
      assertx(slot != kInvalidSlot);
      tv = cls->getSPropData(slot);
      if (RuntimeOption::EvalCheckPropTypeHints > 0) {
        auto const& sprop = cls->staticProperties()[slot];
        auto const& tc = sprop.typeConstraint;
        if (tc.isCheckable()) {
          tc.verifyStaticProperty(fr, cls, sprop.cls, sprop.name);
        }
      }
      break;
    }

    case InitPropOp::NonStatic: {
      auto* propVec = cls->getPropData();
      always_assert(propVec);
      auto const idx = ctx->lookupDeclProp(propName);
      assertx(idx != kInvalidSlot);
      tv = &(*propVec)[idx];
      if (RuntimeOption::EvalCheckPropTypeHints > 0) {
        auto const& prop = cls->declProperties()[idx];
        auto const& tc = prop.typeConstraint;
        if (tc.isCheckable()) tc.verifyProperty(fr, cls, prop.cls, prop.name);
      }
    } break;
  }

  // Copy the initializer into the property slot and pop it from the stack.
  cellDup(*fr, *tvToCell(tv));
  vmStack().popC();
}
6645 OPTBLD_INLINE void iopOODeclExists(OODeclExistsOp subop) {
6646 TypedValue* aloadTV = vmStack().topTV();
6647 if (aloadTV->m_type != KindOfBoolean) {
6648 raise_error("OODeclExists: Expected Bool on top of stack, got %s",
6649 tname(aloadTV->m_type).c_str());
6652 bool autoload = aloadTV->m_data.num;
6653 vmStack().popX();
6655 TypedValue* name = vmStack().topTV();
6656 if (!isStringType(name->m_type)) {
6657 raise_error("OODeclExists: Expected String on stack, got %s",
6658 tname(aloadTV->m_type).c_str());
6661 ClassKind kind;
6662 switch (subop) {
6663 case OODeclExistsOp::Class : kind = ClassKind::Class; break;
6664 case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
6665 case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
6667 tvAsVariant(name) = Unit::classExists(name->m_data.pstr, autoload, kind);
6670 OPTBLD_INLINE void iopSilence(local_var loc, SilenceOp subop) {
6671 switch (subop) {
6672 case SilenceOp::Start:
6673 loc.ptr->m_type = KindOfInt64;
6674 loc.ptr->m_data.num = zero_error_level();
6675 break;
6676 case SilenceOp::End:
6677 assertx(loc.ptr->m_type == KindOfInt64);
6678 restore_error_level(loc.ptr->m_data.num);
6679 break;
6683 std::string prettyStack(const std::string& prefix) {
6684 if (!vmfp()) return "__Halted";
6685 int offset = (vmfp()->m_func->unit() != nullptr)
6686 ? pcOff() : 0;
6687 auto begPrefix = prefix + "__";
6688 auto midPrefix = prefix + "|| ";
6689 auto endPrefix = prefix + "\\/";
6690 auto stack = vmStack().toString(vmfp(), offset, midPrefix);
6691 return begPrefix + "\n" + stack + endPrefix;
6694 // callable from gdb
6695 void DumpStack() {
6696 fprintf(stderr, "%s\n", prettyStack("").c_str());
// callable from gdb
// Walk `skip` frames up from the current one and dump that frame's entire
// unit's bytecode to stdout.
void DumpCurUnit(int skip) {
  ActRec* fp = vmfp();
  Offset pc = fp->m_func->unit() ? pcOff() : 0;
  while (skip--) {
    fp = g_context->getPrevVMState(fp, &pc);
  }
  if (fp == nullptr) {
    std::cout << "Don't have a valid fp\n";
    return;
  }

  printf("Offset = %d, in function %s\n", pc, fp->m_func->name()->data());
  Unit* u = fp->m_func->unit();
  if (u == nullptr) {
    std::cout << "Current unit is NULL\n";
    return;
  }
  printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
  std::cout << u->toString();
}
// callable from gdb
// Print the most recent valid TC return address found on the ActRec chain,
// plus the source file/line currently being executed.
void PrintTCCallerInfo() {
  VMRegAnchor _;

  auto const u = vmfp()->m_func->unit();
  auto const rip = []() -> jit::TCA {
    DECLARE_FRAME_POINTER(reg_fp);
    // NB: We can't directly mutate the register-mapped `reg_fp'.
    for (ActRec* fp = reg_fp; fp; fp = fp->m_sfp) {
      auto const rip = jit::TCA(fp->m_savedRip);
      if (jit::tc::isValidCodeAddress(rip)) return rip;
    }
    return nullptr;
  }();

  fprintf(stderr, "Called from TC address %p\n", rip);
  std::cerr << u->filepath()->data() << ':'
            << u->getLineNumber(u->offsetOf(vmpc())) << '\n';
}
// thread-local cached coverage info: the last (unit, line) recorded, used by
// recordCodeCoverage to avoid re-recording the same line repeatedly.
static __thread Unit* s_prev_unit;
static __thread int s_prev_line;
// Record the source line about to execute into the request's code-coverage
// map. Ignores the passed-in PC and uses the live pcOff() instead.
void recordCodeCoverage(PC /*pc*/) {
  Unit* unit = vmfp()->m_func->unit();
  assertx(unit != nullptr);
  // Skip systemlib hhas; it has no user-visible source lines.
  if (unit == SystemLib::s_hhas_unit) {
    return;
  }
  int line = unit->getLineNumber(pcOff());
  assertx(line != -1);

  // Only record when execution moved to a different unit or line.
  if (unit != s_prev_unit || line != s_prev_line) {
    s_prev_unit = unit;
    s_prev_line = line;
    const StringData* filepath = unit->filepath();
    assertx(filepath->isStatic());
    RI().m_coverage->Record(filepath->data(), line, line);
  }
}
6763 void resetCoverageCounters() {
6764 s_prev_line = -1;
6765 s_prev_unit = nullptr;
// Print a trace-level-3 separator line labeled with the opcode name.
static inline void
condStackTraceSep(Op opcode) {
  TRACE(3, "%s "
        "========================================"
        "========================================\n",
        opcodeToName(opcode));
}

// At trace level 3+, dump the pretty-printed VM stack with the given prefix.
#define COND_STACKTRACE(pfx)\
  ONTRACE(3, auto stack = prettyStack(pfx);\
          Trace::trace("%s\n", stack.c_str());)
6780 namespace {
/*
 * iopWrapReturn() calls a function pointer and forwards its return value if it
 * returns TCA, or nullptr if returns void.
 */
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(Params...), PC, Args&&... args) {
  fn(std::forward<Args>(args)...);
  return nullptr;
}

template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(TCA(fn)(Params...), PC, Args&&... args) {
  return fn(std::forward<Args>(args)...);
}

/*
 * iopSwitch and iopSSwitch take vectors containing Offset and need origpc to
 * translate those to PC. Special-case that here rather than creating a new
 * flag in hhbc.h just for this one case.
 */
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(PC, Params...), PC origpc,
                                Args&&... args) {
  fn(origpc, std::forward<Args>(args)...);
  return nullptr;
}
/*
 * Some bytecodes with SA immediates want the raw Id to look up a NamedEntity
 * quickly, and some want the const StringData*. Support both by decoding to
 * this struct and implicitly converting to what the callee wants.
 */
struct litstr_id {
  /* implicit */ ALWAYS_INLINE operator const StringData*() const {
    return liveUnit()->lookupLitstrId(id);
  }
  /* implicit */ ALWAYS_INLINE operator Id() const {
    return id;
  }

  Id id{kInvalidId};
};
/*
 * These macros are used to generate wrapper functions for the iop*() functions
 * defined earlier in this file. iopWrapFoo() decodes immediates from the
 * bytecode stream according to the signature of Foo (in hhbc.h), then calls
 * iopFoo() with those decoded arguments.
 */

// Extra argument forwarded per instruction-flag class; control-flow
// instructions (CF) additionally receive the interpreter's pc by reference.
#define FLAG_NF
#define FLAG_TF
#define FLAG_CF , pc
#define FLAG_PF
#define FLAG_CF_TF FLAG_CF

// One decoder expression per immediate-type tag used in hhbc.h signatures.
#define DECODE_IVA decode_iva(pc)
#define DECODE_I64A decode<int64_t>(pc)
#define DECODE_LA decode_local(pc)
#define DECODE_IA decode_iter(pc)
#define DECODE_DA decode<double>(pc)
#define DECODE_SA decode<litstr_id>(pc)
#define DECODE_AA decode_litarr(pc)
#define DECODE_RATA decode_rat(pc)
#define DECODE_BA origpc + decode_ba(pc)
#define DECODE_OA(ty) decode<ty>(pc)
#define DECODE_KA decode_member_key(pc, liveUnit())
#define DECODE_LAR decodeLocalRange(pc)
#define DECODE_FCA decodeFCallArgs(op, pc)
#define DECODE_BLA decode_imm_array<Offset>(pc)
#define DECODE_SLA decode_imm_array<StrVecItem>(pc)
#define DECODE_ILA decode_iter_table(pc)
#define DECODE_I32LA decode_imm_array<uint32_t>(pc)
#define DECODE_VSA decode_imm_array<Id>(pc)

// Decode zero through five immediates into locals imm1..imm5.
#define DECODE_NA
#define DECODE_ONE(a) auto const imm1 = DECODE_##a;
#define DECODE_TWO(a, b) DECODE_ONE(a) auto const imm2 = DECODE_##b;
#define DECODE_THREE(a, b, c) DECODE_TWO(a, b) auto const imm3 = DECODE_##c;
#define DECODE_FOUR(a, b, c, d) \
  DECODE_THREE(a, b, c) auto const imm4 = DECODE_##d;
#define DECODE_FIVE(a, b, c, d, e) \
  DECODE_FOUR(a, b, c, d) auto const imm5 = DECODE_##e;

// Forward the decoded immediates as trailing call arguments.
#define PASS_NA
#define PASS_ONE(...) , imm1
#define PASS_TWO(...) , imm1, imm2
#define PASS_THREE(...) , imm1, imm2, imm3
#define PASS_FOUR(...) , imm1, imm2, imm3, imm4
#define PASS_FIVE(...) , imm1, imm2, imm3, imm4, imm5
// Emit one iopWrap* wrapper per opcode: recover the pre-decode PC, decode the
// immediates per the opcode's signature, then invoke the iop* handler through
// iopWrapReturn (which normalizes void vs TCA returns).
#define O(name, imm, in, out, flags)                                 \
  OPTBLD_INLINE TCA iopWrap##name(PC& pc) {                          \
    UNUSED auto const op = Op::name;                                 \
    UNUSED auto const origpc = pc - encoded_op_size(op);             \
    DECODE_##imm                                                     \
    return iopWrapReturn(iop##name, origpc FLAG_##flags PASS_##imm); \
  }
OPCODES
// Wrapper generation is done; tear down all of the helper macros above.
#undef FLAG_NF
#undef FLAG_TF
#undef FLAG_CF
#undef FLAG_PF
#undef FLAG_CF_TF

#undef DECODE_IVA
#undef DECODE_I64A
#undef DECODE_LA
#undef DECODE_IA
#undef DECODE_DA
#undef DECODE_SA
#undef DECODE_AA
#undef DECODE_RATA
#undef DECODE_BA
#undef DECODE_OA
#undef DECODE_KA
#undef DECODE_LAR
#undef DECODE_FCA
#undef DECODE_BLA
#undef DECODE_SLA
#undef DECODE_ILA
#undef DECODE_I32LA
#undef DECODE_VSA

#undef DECODE_NA
#undef DECODE_ONE
#undef DECODE_TWO
#undef DECODE_THREE
#undef DECODE_FOUR
#undef DECODE_FIVE

#undef PASS_NA
#undef PASS_ONE
#undef PASS_TWO
#undef PASS_THREE
#undef PASS_FOUR
#undef PASS_FIVE

#undef O
/*
 * The interpOne functions are fat wrappers around the iop* functions, mostly
 * adding a bunch of debug-only logging and stats tracking.
 */
#define O(opcode, imm, push, pop, flags)                                \
  TCA interpOne##opcode(ActRec* fp, TypedValue* sp, Offset pcOff) {     \
    interp_set_regs(fp, sp, pcOff);                                     \
    SKTRACE(5, liveSK(),                                                \
            "%40s %p %p\n",                                             \
            "interpOne" #opcode " before (fp,sp)", vmfp(), vmsp());     \
    if (Stats::enableInstrCount()) {                                    \
      Stats::inc(Stats::Instr_Transl##opcode, -1);                      \
      Stats::inc(Stats::Instr_InterpOne##opcode);                       \
    }                                                                   \
    if (Trace::moduleEnabled(Trace::interpOne, 1)) {                    \
      static const StringData* cat = makeStaticString("interpOne");     \
      static const StringData* name = makeStaticString(#opcode);        \
      Stats::incStatGrouped(cat, name, 1);                              \
    }                                                                   \
    if (Trace::moduleEnabled(Trace::ringbuffer)) {                      \
      auto sk = liveSK().toAtomicInt();                                 \
      Trace::ringbufferEntry(Trace::RBTypeInterpOne, sk, 0);            \
    }                                                                   \
    INC_TPC(interp_one)                                                 \
    /* Correct for over-counting in TC-stats. */                        \
    Stats::inc(Stats::Instr_TC, -1);                                    \
    condStackTraceSep(Op##opcode);                                      \
    COND_STACKTRACE("op"#opcode" pre: ");                               \
    PC pc = vmpc();                                                     \
    ONTRACE(1, auto offset = vmfp()->m_func->unit()->offsetOf(pc);      \
            Trace::trace("op"#opcode" offset: %d\n", offset));          \
    assertx(peek_op(pc) == Op::opcode);                                 \
    pc += encoded_op_size(Op::opcode);                                  \
    auto const retAddr = iopWrap##opcode(pc);                           \
    vmpc() = pc;                                                        \
    COND_STACKTRACE("op"#opcode" post: ");                              \
    condStackTraceSep(Op##opcode);                                      \
    /*                                                                  \
     * Only set regstate back to dirty if an exception is not           \
     * propagating. If an exception is throwing, regstate for this call \
     * is actually still correct, and we don't have information in the  \
     * fixup map for interpOne calls anyway.                            \
     */                                                                 \
    tl_regState = VMRegState::DIRTY;                                    \
    return retAddr;                                                     \
  }
OPCODES
#undef O
// Dispatch table mapping each opcode to its interpOne entry point, indexed by
// opcode number.
InterpOneFunc interpOneEntryPoints[] = {
#define O(opcode, imm, push, pop, flags) &interpOne##opcode,
  OPCODES
#undef O
};
/*
 * Core interpreter loop. When breakOnCtlFlow is true, dispatch stops after
 * the first control-flow instruction (basic-block mode) and may return a TC
 * address to resume at; otherwise it runs until the VM leaves this nesting
 * level and always returns nullptr.
 */
template <bool breakOnCtlFlow>
TCA dispatchImpl() {
  // Unfortunately, MSVC doesn't support computed
  // gotos, so use a switch instead.
  bool collectCoverage = RID().getCoverage();

#ifndef _MSC_VER
  // Computed-goto tables: plain execution, debugger-attached, and
  // coverage-collecting variants, one label per opcode.
  static const void* const optabDirect[] = {
#define O(name, imm, push, pop, flags) \
  &&Label##name,
  OPCODES
#undef O
  };
  static const void* const optabDbg[] = {
#define O(name, imm, push, pop, flags) \
  &&LabelDbg##name,
  OPCODES
#undef O
  };
  static const void* const optabCover[] = {
#define O(name, imm, push, pop, flags) \
  &&LabelCover##name,
  OPCODES
#undef O
  };
  assertx(sizeof(optabDirect) / sizeof(const void *) == Op_count);
  assertx(sizeof(optabDbg) / sizeof(const void *) == Op_count);
  const void* const* optab = optabDirect;
  if (collectCoverage) {
    optab = optabCover;
  }
  DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
#endif

  bool isCtlFlow = false;
  TCA retAddr = nullptr;
  Op op;

#ifdef _MSC_VER
# define DISPATCH_ACTUAL() goto DispatchSwitch
#else
# define DISPATCH_ACTUAL() goto *optab[size_t(op)]
#endif

// Fetch, trace, and jump to the next opcode's handler; in basic-block mode,
// stop first if the previous instruction was control flow.
#define DISPATCH() do {                                      \
    if (breakOnCtlFlow && isCtlFlow) {                       \
      ONTRACE(1,                                             \
              Trace::trace("dispatch: Halt dispatch(%p)\n",  \
                           vmfp()));                         \
      return retAddr;                                        \
    }                                                        \
    opPC = pc;                                               \
    op = decode_op(pc);                                      \
    COND_STACKTRACE("dispatch: ");                           \
    FTRACE(1, "dispatch: {}: {}\n", pcOff(),                 \
           instrToString(opPC, vmfp()->m_func->unit()));     \
    DISPATCH_ACTUAL();                                       \
} while (0)

  ONTRACE(1, Trace::trace("dispatch: Enter dispatch(%p)\n",
          vmfp()));
  PC pc = vmpc();
  PC opPC;
  DISPATCH();

// Per-opcode handler pieces: debugger hook, coverage recording, and the main
// execute-and-redispatch body.
#define OPCODE_DBG_BODY(name, imm, push, pop, flags)          \
  phpDebuggerOpcodeHook(opPC)
#define OPCODE_COVER_BODY(name, imm, push, pop, flags)        \
  if (collectCoverage) {                                      \
    recordCodeCoverage(opPC);                                 \
  }
#define OPCODE_MAIN_BODY(name, imm, push, pop, flags)         \
  {                                                           \
    if (breakOnCtlFlow && Stats::enableInstrCount()) {        \
      Stats::inc(Stats::Instr_InterpBB##name);                \
    }                                                         \
    retAddr = iopWrap##name(pc);                              \
    vmpc() = pc;                                              \
    if (breakOnCtlFlow) {                                     \
      isCtlFlow = instrIsControlFlow(Op::name);               \
    }                                                         \
    if (instrCanHalt(Op::name) && UNLIKELY(!pc)) {            \
      vmfp() = nullptr;                                       \
      /* We returned from the top VM frame in this nesting level. This means
       * m_savedRip in our ActRec must have been callToExit, which should've
       * been returned by jitReturnPost(), whether or not we were called from
       * the TC. We only actually return callToExit to our caller if that
       * caller is dispatchBB(). */                           \
      assertx(retAddr == jit::tc::ustubs().callToExit);       \
      return breakOnCtlFlow ? retAddr : nullptr;              \
    }                                                         \
    assertx(isCtlFlow || !retAddr);                           \
    DISPATCH();                                               \
  }

#ifdef _MSC_VER
DispatchSwitch:
  switch (uint8_t(op)) {
#define O(name, imm, push, pop, flags)                                      \
    case Op::name: {                                                        \
      DEBUGGER_ATTACHED_ONLY(OPCODE_DBG_BODY(name, imm, push, pop, flags)); \
      OPCODE_COVER_BODY(name, imm, push, pop, flags)                        \
      OPCODE_MAIN_BODY(name, imm, push, pop, flags)                         \
    }
#else
#define O(name, imm, push, pop, flags)                        \
  LabelDbg##name:                                             \
    OPCODE_DBG_BODY(name, imm, push, pop, flags);             \
  LabelCover##name:                                           \
    OPCODE_COVER_BODY(name, imm, push, pop, flags)            \
  Label##name:                                                \
    OPCODE_MAIN_BODY(name, imm, push, pop, flags)
#endif

  OPCODES

#ifdef _MSC_VER
  }
#endif
#undef O
#undef DISPATCH
#undef DISPATCH_ACTUAL
#undef OPCODE_DBG_BODY
#undef OPCODE_COVER_BODY
#undef OPCODE_MAIN_BODY

  assertx(retAddr == nullptr);
  return nullptr;
}
// Interpret until the VM leaves this nesting level (non-basic-block mode);
// in this mode dispatchImpl never produces a TC resume address.
static void dispatch() {
  WorkloadStats guard(WorkloadStats::InInterp);

  DEBUG_ONLY auto const retAddr = dispatchImpl<false>();
  assertx(retAddr == nullptr);
}
// We are about to go back to translated code, check whether we should
// stick with the interpreter. NB: if we've just executed a return
// from pseudomain, then there's no PC and no more code to interpret.
OPTBLD_INLINE TCA switchModeForDebugger(TCA retAddr) {
  if (DEBUGGER_FORCE_INTR && (vmpc() != 0)) {
    if (retAddr) {
      // We just interpreted a bytecode that decided we need to return to an
      // address in the TC rather than interpreting up into our caller. This
      // means it might not be safe to throw an exception right now (see
      // discussion in jitReturnPost). So, resume execution in the TC at a stub
      // that will throw the execution from a safe place.
      FTRACE(1, "Want to throw VMSwitchMode but retAddr = {}, "
             "overriding with throwSwitchMode stub.\n", retAddr);
      return jit::tc::ustubs().throwSwitchMode;
    } else {
      throw VMSwitchMode();
    }
  }

  return retAddr;
}
7138 TCA dispatchBB() {
7139 auto sk = [] {
7140 return SrcKey(vmfp()->func(), vmpc(), resumeModeFromActRec(vmfp()),
7141 vmfp()->func()->cls() && vmfp()->hasThis());
7144 if (Trace::moduleEnabled(Trace::dispatchBB)) {
7145 static auto cat = makeStaticString("dispatchBB");
7146 auto name = makeStaticString(show(sk()));
7147 Stats::incStatGrouped(cat, name, 1);
7149 if (Trace::moduleEnabled(Trace::ringbuffer)) {
7150 Trace::ringbufferEntry(Trace::RBTypeDispatchBB, sk().toAtomicInt(), 0);
7152 auto retAddr = dispatchImpl<true>();
7153 return switchModeForDebugger(retAddr);