New bytecode for static method calls with dynamic class name
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blobee1fcef2c8a528e2fa69936bdda47a357f4969f6
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
18 #include "hphp/runtime/vm/bytecode.h"
20 #include <algorithm>
21 #include <string>
22 #include <vector>
23 #include <sstream>
24 #include <iostream>
25 #include <iomanip>
26 #include <cinttypes>
28 #include <boost/filesystem.hpp>
30 #include <folly/String.h>
31 #include <folly/portability/SysMman.h>
33 #include "hphp/util/numa.h"
34 #include "hphp/util/portability.h"
35 #include "hphp/util/ringbuffer.h"
36 #include "hphp/util/text-util.h"
37 #include "hphp/util/trace.h"
39 #include "hphp/system/systemlib.h"
41 #include "hphp/runtime/base/apc-stats.h"
42 #include "hphp/runtime/base/apc-typed-value.h"
43 #include "hphp/runtime/base/array-init.h"
44 #include "hphp/runtime/base/array-iterator.h"
45 #include "hphp/runtime/base/array-provenance.h"
46 #include "hphp/runtime/base/bespoke-array.h"
47 #include "hphp/runtime/base/code-coverage.h"
48 #include "hphp/runtime/base/collections.h"
49 #include "hphp/runtime/base/container-functions.h"
50 #include "hphp/runtime/base/enum-util.h"
51 #include "hphp/runtime/base/execution-context.h"
52 #include "hphp/runtime/base/file-util.h"
53 #include "hphp/runtime/base/hhprof.h"
54 #include "hphp/runtime/base/implicit-context.h"
55 #include "hphp/runtime/base/memory-manager.h"
56 #include "hphp/runtime/base/object-data.h"
57 #include "hphp/runtime/base/program-functions.h"
58 #include "hphp/runtime/base/rds.h"
59 #include "hphp/runtime/base/repo-auth-type-codec.h"
60 #include "hphp/runtime/base/runtime-error.h"
61 #include "hphp/runtime/base/runtime-option.h"
62 #include "hphp/runtime/base/stat-cache.h"
63 #include "hphp/runtime/base/stats.h"
64 #include "hphp/runtime/base/strings.h"
65 #include "hphp/runtime/base/tv-arith.h"
66 #include "hphp/runtime/base/tv-comparisons.h"
67 #include "hphp/runtime/base/tv-conversions.h"
68 #include "hphp/runtime/base/tv-refcount.h"
69 #include "hphp/runtime/base/tv-type.h"
70 #include "hphp/runtime/base/type-structure.h"
71 #include "hphp/runtime/base/type-structure-helpers-defs.h"
72 #include "hphp/runtime/base/type-structure-helpers.h"
73 #include "hphp/runtime/base/type-variant.h"
74 #include "hphp/runtime/base/unit-cache.h"
75 #include "hphp/runtime/base/vanilla-dict.h"
76 #include "hphp/runtime/base/vanilla-keyset.h"
78 #include "hphp/runtime/ext/array/ext_array.h"
79 #include "hphp/runtime/ext/asio/ext_await-all-wait-handle.h"
80 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
81 #include "hphp/runtime/ext/asio/ext_async-generator-wait-handle.h"
82 #include "hphp/runtime/ext/asio/ext_async-generator.h"
83 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
84 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
85 #include "hphp/runtime/ext/asio/ext_waitable-wait-handle.h"
86 #include "hphp/runtime/ext/std/ext_std_closure.h"
87 #include "hphp/runtime/ext/extension.h"
88 #include "hphp/runtime/ext/generator/ext_generator.h"
89 #include "hphp/runtime/ext/hh/ext_hh.h"
90 #include "hphp/runtime/ext/reflection/ext_reflection.h"
91 #include "hphp/runtime/ext/std/ext_std_variable.h"
92 #include "hphp/runtime/ext/string/ext_string.h"
93 #include "hphp/runtime/ext/json/JSON_parser.h"
95 #include "hphp/runtime/server/rpc-request-handler.h"
96 #include "hphp/runtime/server/source-root-info.h"
98 #include "hphp/runtime/vm/act-rec-defs.h"
99 #include "hphp/runtime/vm/act-rec.h"
100 #include "hphp/runtime/vm/class.h"
101 #include "hphp/runtime/vm/class-meth-data-ref.h"
102 #include "hphp/runtime/vm/cti.h"
103 #include "hphp/runtime/vm/debug/debug.h"
104 #include "hphp/runtime/vm/debugger-hook.h"
105 #include "hphp/runtime/vm/event-hook.h"
106 #include "hphp/runtime/ext/functioncredential/ext_functioncredential.h"
107 #include "hphp/runtime/vm/hh-utils.h"
108 #include "hphp/runtime/vm/hhbc-codec.h"
109 #include "hphp/runtime/vm/hhbc.h"
110 #include "hphp/runtime/vm/interp-helpers.h"
111 #include "hphp/runtime/vm/iter.h"
112 #include "hphp/runtime/vm/member-operations.h"
113 #include "hphp/runtime/vm/memo-cache.h"
114 #include "hphp/runtime/vm/method-lookup.h"
115 #include "hphp/runtime/vm/native.h"
116 #include "hphp/runtime/vm/reified-generics.h"
117 #include "hphp/runtime/vm/repo-global-data.h"
118 #include "hphp/runtime/vm/resumable.h"
119 #include "hphp/runtime/vm/runtime.h"
120 #include "hphp/runtime/vm/srckey.h"
121 #include "hphp/runtime/vm/super-inlining-bros.h"
122 #include "hphp/runtime/vm/taint/interpreter.h"
123 #include "hphp/runtime/vm/type-constraint.h"
124 #include "hphp/runtime/vm/type-profile.h"
125 #include "hphp/runtime/vm/unwind.h"
126 #include "hphp/runtime/vm/workload-stats.h"
128 #include "hphp/runtime/vm/jit/code-cache.h"
129 #include "hphp/runtime/vm/jit/enter-tc.h"
130 #include "hphp/runtime/vm/jit/jit-resume-addr-defs.h"
131 #include "hphp/runtime/vm/jit/perf-counters.h"
132 #include "hphp/runtime/vm/jit/service-request-handlers.h"
133 #include "hphp/runtime/vm/jit/tc.h"
134 #include "hphp/runtime/vm/jit/translator-inline.h"
135 #include "hphp/runtime/vm/jit/translator-runtime.h"
136 #include "hphp/runtime/vm/jit/translator.h"
137 #include "hphp/runtime/vm/jit/unwind-itanium.h"
139 #include "hphp/util/stacktrace-profiler.h"
namespace HPHP {

TRACE_SET_MOD(bcinterp);

// RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
// to be closer to other bytecode.cpp data.
bool RuntimeOption::RepoAuthoritative = false;

using jit::JitResumeAddr;
using jit::TCA;

// GCC 4.8 has some real problems with all the inlining in this file, so don't
// go overboard with that version.
//
// OPTBLD_INLINE marks the iop* handlers: aggressive inlining in optimized
// builds, plain functions in debug builds (and on GCC 4.8).
#if !defined(NDEBUG) || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 8))
#define OPTBLD_INLINE
#else
#define OPTBLD_INLINE ALWAYS_INLINE
#endif
161 Class* arGetContextClass(const ActRec* ar) {
162 if (ar == nullptr) {
163 return nullptr;
165 return ar->func()->cls();
// Decref/free every local slot of the given frame without running the
// user-visible frame hooks (delegates to the inline helper with the
// frame's full local count).
void frame_free_locals_no_hook(ActRec* fp) {
  frame_free_locals_inl_no_hook(fp, fp->func()->numLocals());
}
const StaticString s_file("file");
const StaticString s_line("line");

///////////////////////////////////////////////////////////////////////////////

//=============================================================================
// Miscellaneous decoders.

// prettytype() overloads map immediate-operand types to human-readable names
// for the TRACE output emitted while decoding bytecode immediates.
inline const char* prettytype(int) { return "int"; }
inline const char* prettytype(long) { return "long"; }
inline const char* prettytype(long long) { return "long long"; }
inline const char* prettytype(double) { return "double"; }
inline const char* prettytype(unsigned) { return "unsigned"; }
// NOTE(review): the string says "OpDeclExistsOp" while the type is
// OODeclExistsOp — looks like a typo, but it is trace-only output; confirm
// before changing.
inline const char* prettytype(OODeclExistsOp) { return "OpDeclExistsOp"; }
inline const char* prettytype(FatalOp) { return "FatalOp"; }
inline const char* prettytype(IsTypeOp) { return "IsTypeOp"; }
inline const char* prettytype(SetOpOp) { return "SetOpOp"; }
inline const char* prettytype(IncDecOp) { return "IncDecOp"; }
inline const char* prettytype(ObjMethodOp) { return "ObjMethodOp"; }
inline const char* prettytype(BareThisOp) { return "BareThisOp"; }
inline const char* prettytype(InitPropOp) { return "InitPropOp"; }
inline const char* prettytype(SilenceOp) { return "SilenceOp"; }
inline const char* prettytype(SwitchKind) { return "SwitchKind"; }
inline const char* prettytype(MOpMode) { return "MOpMode"; }
inline const char* prettytype(QueryMOp) { return "QueryMOp"; }
inline const char* prettytype(SetRangeOp) { return "SetRangeOp"; }
inline const char* prettytype(TypeStructResolveOp) {
  return "TypeStructResolveOp";
}
inline const char* prettytype(ReadonlyOp) { return "ReadonlyOp"; }
inline const char* prettytype(ContCheckOp) { return "ContCheckOp"; }
inline const char* prettytype(SpecialClsRef) { return "SpecialClsRef"; }
inline const char* prettytype(CollectionType) { return "CollectionType"; }
inline const char* prettytype(IsLogAsDynamicCallOp) {
  return "IsLogAsDynamicCallOp";
}
// load a T value from *pc without incrementing
// (memcpy avoids unaligned-access UB on the bytecode stream).
template<class T> T peek(PC pc) {
  T v;
  std::memcpy(&v, pc, sizeof v);
  TRACE(2, "decode: Immediate %s %" PRIi64"\n", prettytype(v), int64_t(v));
  return v;
}

// Load a T value from *pc and advance pc past it.
template<class T> T decode(PC& pc) {
  auto v = peek<T>(pc);
  pc += sizeof(T);
  return v;
}
// Decode a literal-array immediate: an Id that indexes into the current
// unit's array table.
inline const ArrayData* decode_litarr(PC& pc) {
  return liveUnit()->lookupArrayId(decode<Id>(pc));
}

// Decode a local-variable immediate and return the address of that local in
// the current frame.
ALWAYS_INLINE TypedValue* decode_local(PC& pc) {
  auto la = decode_iva(pc);
  assertx(la < vmfp()->func()->numLocals());
  return frame_local(vmfp(), la);
}

// Like decode_local, but also carries the local's index (used by opcodes
// that need the slot number, e.g. for error reporting).
ALWAYS_INLINE local_var decode_indexed_local(PC& pc) {
  auto la = decode_iva(pc);
  assertx(la < vmfp()->func()->numLocals());
  return local_var{frame_local(vmfp(), la), safe_cast<int32_t>(la)};
}

// Decode a named-local immediate: both the name id (may be
// kInvalidLocalName for unnamed locals) and the slot address.
ALWAYS_INLINE named_local_var decode_named_local_var(PC& pc) {
  auto loc = decode_named_local(pc);
  assertx(0 <= loc.id);
  assertx(loc.id < vmfp()->func()->numLocals());
  assertx(kInvalidLocalName <= loc.name);
  assertx(loc.name < vmfp()->func()->numNamedLocals());
  return named_local_var{loc.name, frame_local(vmfp(), loc.id)};
}

// Decode an iterator-id immediate and return the iterator in the current
// frame.
ALWAYS_INLINE Iter* decode_iter(PC& pc) {
  auto ia = decode_iva(pc);
  return frame_iter(vmfp(), ia);
}

// Decode an inline immediate array of T: a length followed by the packed
// elements; pc is advanced past the whole array.
template<typename T>
OPTBLD_INLINE imm_array<T> decode_imm_array(PC& pc) {
  auto const size = decode_iva(pc);
  auto const arr_pc = pc;
  pc += size * sizeof(T);
  return imm_array<T>{size, arr_pc};
}

// Decode a RepoAuthType immediate. Only debug builds materialize the type;
// release builds just skip over the encoded bytes.
OPTBLD_INLINE RepoAuthType decode_rat(PC& pc) {
  if (debug) return decodeRAT(liveUnit(), pc);

  pc += encodedRATSize(pc);
  return RepoAuthType{};
}
//=============================================================================
// Miscellaneous helpers.

// Resolve what `static::` means for the given frame: nullptr for free
// functions, the runtime class of $this when present, otherwise the frame's
// recorded late-bound class.
static inline Class* frameStaticClass(ActRec* fp) {
  if (!fp->func()->cls()) return nullptr;
  if (fp->hasThis()) {
    return fp->getThis()->getVMClass();
  }
  return fp->getClass();
}
//=============================================================================
// VarEnv.

namespace {
// Names of the PHP superglobals and argc/argv, pre-seeded into the global
// name/value table.
const StaticString
  s_argc("argc"),
  s_argv("argv"),
  s__SERVER("_SERVER"),
  s__GET("_GET"),
  s__POST("_POST"),
  s__COOKIE("_COOKIE"),
  s__FILES("_FILES"),
  s__ENV("_ENV"),
  s__REQUEST("_REQUEST"),
  s_HTTP_RAW_POST_DATA("HTTP_RAW_POST_DATA");

// Create the request-global name/value table and seed it with the
// superglobal slots (empty dicts) plus null argc/argv/HTTP_RAW_POST_DATA.
// Must be called at most once per request (asserted).
void createGlobalNVTable() {
  assertx(!g_context->m_globalNVTable);
  g_context->m_globalNVTable = req::make_raw<NameValueTable>();
  auto nvTable = g_context->m_globalNVTable;
  // A single empty dict is shared by all array-valued slots; set() copies
  // the TypedValue, and dicts are copy-on-write, so this is safe.
  Variant arr(ArrayData::CreateDict());
  nvTable->set(s_argc.get(), init_null_variant.asTypedValue());
  nvTable->set(s_argv.get(), init_null_variant.asTypedValue());
  nvTable->set(s__SERVER.get(), arr.asTypedValue());
  nvTable->set(s__GET.get(), arr.asTypedValue());
  nvTable->set(s__POST.get(), arr.asTypedValue());
  nvTable->set(s__COOKIE.get(), arr.asTypedValue());
  nvTable->set(s__FILES.get(), arr.asTypedValue());
  nvTable->set(s__ENV.get(), arr.asTypedValue());
  nvTable->set(s__REQUEST.get(), arr.asTypedValue());
  nvTable->set(s_HTTP_RAW_POST_DATA.get(), init_null_variant.asTypedValue());
}

// Internal (compiler-generated) variable names that should never be exposed
// to user code. The leading '0' makes them unspellable in Hack source.
const StaticString s_reified_generics_var("0ReifiedGenerics");
const StaticString s_coeffects_var("0Coeffects");
// Snapshot the global name/value table into a dict keyed by variable name,
// skipping internal reified-generics/coeffects variables, and key-sorted so
// the result does not depend on hashtable iteration order.
Array getDefinedVariables() {
  Array ret = Array::CreateDict();

  NameValueTable::Iterator iter(g_context->m_globalNVTable);
  for (; iter.valid(); iter.next()) {
    auto const sd = iter.curKey();
    auto const val = iter.curVal();
    // Reified functions and functions with coeffects rules
    // have an internal variables
    if (s_reified_generics_var.equal(sd) || s_coeffects_var.equal(sd)) {
      continue;
    }
    ret.set(StrNR(sd).asString(), Variant{const_variant_ref{val}});
  }

  // Make result independent of the hashtable implementation.
  ArrayData* sorted = ret->escalateForSort(SORTFUNC_KSORT);
  assertx(sorted == ret.get() ||
          sorted->empty() ||
          sorted->hasExactlyOneRef());
  // If escalation produced a fresh array, adopt it (attach takes over the
  // single reference) after the sort below completes.
  SCOPE_EXIT {
    if (sorted != ret.get()) {
      ret = Array::attach(sorted);
    }
  };
  sorted->ksort(0, true);

  return ret;
}
//=============================================================================
// Stack.

// Store actual stack elements array in a thread-local in order to amortize the
// cost of allocation.
struct StackElms {
  ~StackElms() { free(m_elms); }
  // Lazily allocate the VM stack storage. The buffer is both sized and
  // aligned to EvalVMStackElms * sizeof(TypedValue); the alignment is what
  // lets wouldOverflow() test overflow with a mask instead of a compare.
  TypedValue* elms() {
    if (m_elms == nullptr) {
      // RuntimeOption::EvalVMStackElms-sized and -aligned.
      size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
      if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
        throw std::runtime_error(
          std::string("VM stack initialization failed: ") +
          folly::errnoStr(errno).c_str());
      }
      // Tell the kernel we don't need the pages yet; they fault in on use.
      madvise(m_elms, algnSz, MADV_DONTNEED);
    }
    return m_elms;
  }
  // Release the physical pages between requests without freeing the mapping.
  void flush() {
    if (m_elms != nullptr) {
      size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
      madvise(m_elms, algnSz, MADV_DONTNEED);
    }
  }
private:
  TypedValue* m_elms{nullptr};
};
THREAD_LOCAL_FLAT(StackElms, t_se);
const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
// We reserve the bottom page of each stack for use as the surprise
// page, so the minimum useful stack size is the next power of two.
const uint32_t Stack::sMinStackElms =
  2 * sSurprisePageSize / sizeof(TypedValue);

// Validate the configured EvalVMStackElms: it must be at least
// sMinStackElms and a power of two (required by the aligned-allocation and
// mask-based overflow checks). Throws std::runtime_error on violation.
void Stack::ValidateStackSize() {
  if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
    throw std::runtime_error(folly::sformat(
      "VM stack size of {:#x} is below the minimum of {:#x}",
      RuntimeOption::EvalVMStackElms,
      sMinStackElms
    ));
  }
  if (!folly::isPowTwo(RuntimeOption::EvalVMStackElms)) {
    throw std::runtime_error(folly::sformat(
      "VM stack size of {:#x} is not a power of 2",
      RuntimeOption::EvalVMStackElms
    ));
  }
}
Stack::Stack()
  : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
}

Stack::~Stack() {
  requestExit();
}

// Per-request initialization: attach the thread-local element storage and
// publish the stack-limit/surprise word into RDS.
void Stack::requestInit() {
  m_elms = t_se->elms();
  // Burn one element of the stack, to satisfy the constraint that
  // valid m_top values always have the same high-order (>
  // log(RuntimeOption::EvalVMStackElms)) bits.
  m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;

  // The limit is the surprise page plus a padding margin; surprise flags
  // live in the low bits of this word, so it must start with none set.
  rds::header()->stackLimitAndSurprise.store(
    reinterpret_cast<uintptr_t>(
      reinterpret_cast<char*>(m_elms) + sSurprisePageSize +
        stackCheckPadding() * sizeof(TypedValue)
    ),
    std::memory_order_release
  );
  assertx(!(rds::header()->stackLimitAndSurprise.load() & kSurpriseFlagMask));

  // Because of the surprise page at the bottom of the stack we lose an
  // additional 256 elements which must be taken into account when checking for
  // overflow.
  UNUSED size_t maxelms =
    RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
  assertx(!wouldOverflow(maxelms - 1));
  assertx(wouldOverflow(maxelms));
}

void Stack::requestExit() {
  m_elms = nullptr;
}
// Release per-thread evaluation-stack and related caches between requests:
// RPC handler state, the request heap, the stack pages, RDS, and the JSON
// parser caches. Asserts the heap is empty afterwards.
void flush_evaluation_stack() {
  if (vmStack().isAllocated()) {
    // For RPCRequestHandler threads, the ExecutionContext can stay
    // alive across requests, but its always ok to kill it between
    // requests, so do so now
    RPCRequestHandler::cleanupState();
  }

  tl_heap->flush();

  if (!t_se.isNull()) {
    t_se->flush();
  }
  rds::flush();
  json_parser_flush_caches();

  always_assert(tl_heap->empty());
}
// Render one stack/local TypedValue for debug dumps: type tag, refcount
// annotation, and a short value summary. Handles corrupt cells (bad type
// tag or bad refcount) by emitting "???" markers instead of crashing.
static std::string toStringElm(TypedValue tv) {
  std::ostringstream os;

  if (!isRealType(tv.m_type)) {
    os << " ??? type " << static_cast<data_type_t>(tv.m_type) << "\n";
    return os.str();
  }
  if (isRefcountedType(tv.m_type) &&
      !tv.m_data.pcnt->checkCount()) {
    // OK in the invoking frame when running a destructor.
    os << " ??? inner_count " << tvGetCount(tv) << " ";
    return os.str();
  }

  // Append the ":c(...)" refcount annotation for the current cell.
  auto print_count = [&] {
    if (tv.m_data.pcnt->isStatic()) {
      os << ":c(static)";
    } else if (tv.m_data.pcnt->isUncounted()) {
      os << ":c(uncounted)";
    } else {
      os << ":c(" << tv.m_data.pcnt->count() << ")";
    }
  };

  os << "C:";

  // The do/while(0) + `continue` idiom lets each fully-handled case jump
  // straight out of the loop, while falling off the switch (an unhandled
  // tag) hits not_reached().
  do {
    switch (tv.m_type) {
    case KindOfUninit:
      os << "Uninit";
      continue;
    case KindOfNull:
      os << "Null";
      continue;
    case KindOfBoolean:
      os << (tv.m_data.num ? "True" : "False");
      continue;
    case KindOfInt64:
      os << "0x" << std::hex << tv.m_data.num << std::dec;
      continue;
    case KindOfDouble:
      os << tv.m_data.dbl;
      continue;
    case KindOfPersistentString:
    case KindOfString:
      {
        // Strings are truncated to 128 chars and escaped for readability.
        int len = tv.m_data.pstr->size();
        bool truncated = false;
        if (len > 128) {
          len = 128;
          truncated = true;
        }
        os << tv.m_data.pstr;
        print_count();
        os << ":\""
           << escapeStringForCPP(tv.m_data.pstr->data(), len)
           << "\"" << (truncated ? "..." : "");
      }
      continue;
    case KindOfPersistentVec:
    case KindOfVec:
      assertx(tv.m_data.parr->isVecType());
      assertx(tv.m_data.parr->checkCount());
      os << tv.m_data.parr;
      print_count();
      os << ":Vec";
      continue;
    case KindOfPersistentDict:
    case KindOfDict:
      assertx(tv.m_data.parr->isDictType());
      assertx(tv.m_data.parr->checkCount());
      os << tv.m_data.parr;
      print_count();
      os << ":Dict";
      continue;
    case KindOfPersistentKeyset:
    case KindOfKeyset:
      assertx(tv.m_data.parr->isKeysetType());
      assertx(tv.m_data.parr->checkCount());
      os << tv.m_data.parr;
      print_count();
      os << ":Keyset";
      continue;
    case KindOfObject:
      assertx(tv.m_data.pobj->checkCount());
      os << tv.m_data.pobj;
      print_count();
      os << ":Object("
         << tv.m_data.pobj->getClassName().get()->data()
         << ")";
      continue;
    case KindOfResource:
      assertx(tv.m_data.pres->checkCount());
      os << tv.m_data.pres;
      print_count();
      os << ":Resource("
         << tv.m_data.pres->data()->o_getClassName().get()->data()
         << ")";
      continue;
    case KindOfRFunc: // TODO(T63348446) serialize the reified generics
      assertx(tv.m_data.prfunc->checkCount());
      os << tv.m_data.prfunc;
      print_count();
      os << ":RFunc("
         << tv.m_data.prfunc->m_func->fullName()->data()
         << ")<"
         << tv.m_data.prfunc->m_arr
         << ">";
      continue;
    case KindOfFunc:
      os << ":Func("
         << tv.m_data.pfunc->fullName()->data()
         << ")";
      continue;
    case KindOfClass:
      os << ":Class("
         << tv.m_data.pclass->name()->data()
         << ")";
      continue;
    case KindOfLazyClass:
      os << ":LClass("
         << tv.m_data.plazyclass.name()->data()
         << ")";
      continue;
    case KindOfClsMeth:
      os << ":ClsMeth("
         << tv.m_data.pclsmeth->getCls()->name()->data()
         << ", "
         << tv.m_data.pclsmeth->getFunc()->fullName()->data()
         << ")";
      continue;
    case KindOfRClsMeth:
      os << ":RClsMeth("
         << tv.m_data.prclsmeth->m_cls->name()->data()
         << ", "
         << tv.m_data.prclsmeth->m_func->fullName()->data()
         << ")<"
         << tv.m_data.prclsmeth->m_arr
         << ">";
      continue;
    }
    not_reached();
  } while (0);

  return os.str();
}
603 * Return true if Offset o is inside the protected region of a fault
604 * funclet for iterId, otherwise false.
606 static bool checkIterScope(const Func* f, Offset o, Id iterId) {
607 assertx(o >= 0 && o < f->bclen());
608 for (auto const& eh : f->ehtab()) {
609 if (eh.m_base <= o && o < eh.m_past &&
610 eh.m_iterId == iterId) {
611 return true;
614 return false;
// Render one VM frame (and, recursively, all frames beneath it) into os:
// function identity, locals, iterators, and the evaluation-stack cells
// belonging to the frame. `isTop` distinguishes the innermost frame, whose
// locals are known to still be live.
static void toStringFrame(std::ostream& os, const ActRec* fp,
                          int offset, const TypedValue* ftop,
                          const std::string& prefix, bool isTop = true) {
  assertx(fp);

  // Use depth-first recursion to output the most deeply nested stack frame
  // first.
  {
    Offset prevPc = 0;
    TypedValue* prevStackTop = nullptr;
    ActRec* prevFp = g_context->getPrevVMState(fp, &prevPc, &prevStackTop);
    if (prevFp != nullptr) {
      toStringFrame(os, prevFp, prevPc, prevStackTop, prefix, false);
    }
  }

  os << prefix;
  const Func* func = fp->func();
  assertx(func);
  func->validate();
  std::string funcName(func->fullName()->data());
  os << "{func:" << funcName
     << ",callOff:" << fp->callOffset()
     << ",this:0x"
     << std::hex << (func->cls() && fp->hasThis() ? fp->getThis() : nullptr)
     << std::dec << "}";

  if (func->numLocals() > 0) {
    // Don't print locals for parent frames on a Ret(C|V) since some of them
    // may already be destructed.
    if (isRet(func->getOp(offset)) && !isTop) {
      os << "<locals destroyed>";
    } else {
      os << "<";
      int n = func->numLocals();
      for (int i = 0; i < n; i++) {
        if (i > 0) {
          os << " ";
        }
        os << toStringElm(*frame_local(fp, i));
      }
      os << ">";
    }
  }

  if (func->numIterators() > 0) {
    os << "|";
    for (int i = 0; i < func->numIterators(); i++) {
      if (i > 0) {
        os << " ";
      }
      // Only iterators whose protected region covers `offset` are live.
      if (checkIterScope(func, offset, i)) {
        os << frame_iter(fp, i)->toString();
      } else {
        os << "I:Undefined";
      }
    }
    os << "|";
  }

  std::vector<std::string> stackElems;
  visitStackElems(
    fp, ftop,
    [&](const TypedValue* tv) {
      stackElems.push_back(toStringElm(*tv));
    }
  );
  // visitStackElems walks top-down; reverse so the dump reads bottom-up.
  std::reverse(stackElems.begin(), stackElems.end());
  os << ' ' << folly::join(' ', stackElems);

  os << '\n';
}
// Produce a multi-line textual dump of the whole VM stack, deepest frame
// first, headed by the current file/line/function.
std::string Stack::toString(const ActRec* fp, int offset,
                            const std::string prefix/* = "" */) const {
  // The only way to figure out which stack elements are activation records is
  // to follow the frame chain. However, the goal for each stack frame is to
  // print stack fragments from deepest to shallowest -- a then b in the
  // following example:
  //
  //   {func:foo,callOff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
  //   aaaaaaaaaaaaaaaaaa         bbbbbbbbbbbbbb
  //
  // Use depth-first recursion to get the output order correct.

  std::ostringstream os;
  auto unit = fp->unit();
  auto func = fp->func();
  // NOTE(review): the header line number comes from vmpc(), not from the
  // `offset` parameter — presumably intentional (current PC), but confirm.
  os << prefix << "=== Stack at "
     << unit->filepath()->data() << ":"
     << func->getLineNumber(func->offsetOf(vmpc()))
     << " func " << func->fullName()->data() << " ===\n";

  toStringFrame(os, fp, offset, m_top, prefix);

  return os.str();
}
// Return true if pushing numCells more cells would run into the surprise
// page at the bottom of the stack.
bool Stack::wouldOverflow(int numCells) const {
  // The funny approach here is to validate the translator's assembly
  // technique. We've aligned and sized the stack so that the high order
  // bits of valid cells are all the same. In the translator, numCells
  // can be hardcoded, and m_top is wired into a register,
  // so the expression requires no loads.
  intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
  // Masking with (size - 1) works because EvalVMStackElms is a power of two
  // and the storage is size-aligned (see StackElms::elms).
  truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
  intptr_t diff = truncatedTop - numCells -
    sSurprisePageSize / sizeof(TypedValue);
  return diff < 0;
}
// Stack base for any frame: dispatch on whether the frame is a resumed
// (generator/async) frame or an ordinary one.
TypedValue* Stack::anyFrameStackBase(const ActRec* fp) {
  return isResumed(fp) ? Stack::resumableStackBase(fp)
                       : Stack::frameStackBase(fp);
}

// Stack base of a non-resumed frame: the ActRec sits above the frame's
// locals/iterators, which occupy numSlotsInFrame() cells below it.
TypedValue* Stack::frameStackBase(const ActRec* fp) {
  assertx(!isResumed(fp));
  return (TypedValue*)fp - fp->func()->numSlotsInFrame();
}
// Stack base of a resumed (generator/async) frame. Resumable frames live
// off the VM stack, so the base must be derived from the resuming caller.
TypedValue* Stack::resumableStackBase(const ActRec* fp) {
  assertx(isResumed(fp));
  auto sfp = fp->sfp();
  if (sfp) {
    // The non-reentrant case occurs when a non-async or async generator is
    // resumed via ContEnter or ContRaise opcode. These opcodes leave a single
    // value on the stack that becomes part of the generator's stack. So we
    // find the caller's FP, compensate for its locals and iterators, and then
    // we've found the base of the generator's stack.
    assertx(fp->func()->isGenerator());

    // sfp should be a fully materialized frame on the VM stack, as the
    // resumption of generators is not allowed from other generators.
    assertx(!sfp->func()->isGenerator());
    return (TypedValue*)sfp - sfp->func()->numSlotsInFrame();
  } else {
    // The reentrant case occurs when asio scheduler resumes an async function
    // or async generator. We simply use the top of stack of the previous VM
    // frame (since the ActRec, locals, and iters for this frame do not reside
    // on the VM stack).
    assertx(fp->func()->isAsync());
    TypedValue* prevSp;
    DEBUG_ONLY auto const prevFp =
      g_context.getNoCheck()->getPrevVMState(fp, nullptr, &prevSp);
    assertx(prevFp != nullptr);
    return prevSp;
  }
}
// Build a dict of the named, initialized locals of the given frame
// (variable name => value). Returns an empty dict for null or inlined
// frames, whose locals are not materialized.
Array getDefinedVariables(const ActRec* fp) {
  if (UNLIKELY(fp == nullptr || fp->isInlined())) return empty_dict_array();
  auto const func = fp->func();
  auto const numLocals = func->numNamedLocals();
  DictInit ret(numLocals);
  for (Id id = 0; id < numLocals; ++id) {
    auto const local = frame_local(fp, id);
    // Skip uninitialized locals — they are not "defined".
    if (type(local) == KindOfUninit) {
      continue;
    }
    // Unnamed (compiler temporary) locals have no user-visible name.
    auto const localNameSd = func->localVarName(id);
    if (!localNameSd) continue;
    // this is basically just a convoluted const_cast :p
    Variant name(localNameSd, Variant::PersistentStrInit{});
    ret.set(name.getStringData(), Variant{variant_ref{local}});
  }
  return ret.toArray();
}
// Unpack or repack arguments as needed to match the function signature.
// The stack contains numArgs arguments plus an extra cell containing
// arguments to unpack.
//
// Returns the resulting argument count on the stack: either a plain count
// (all args fit as non-variadic params) or numParams + 1 when a packed vec
// of "extra" args is left on top for the variadic parameter.
uint32_t prepareUnpackArgs(const Func* func, uint32_t numArgs,
                           bool checkInOutAnnot) {
  auto& stack = vmStack();
  auto unpackArgs = *stack.topC();
  if (!isContainer(unpackArgs)) throwInvalidUnpackArgs();
  stack.discard();
  // We own the container cell once it's off the stack; drop it on exit.
  SCOPE_EXIT { tvDecRefGen(unpackArgs); };

  auto const numUnpackArgs = getContainerSize(unpackArgs);
  auto const numParams = func->numNonVariadicParams();
  if (LIKELY(numArgs == numParams)) {
    // Convert unpack args to the proper type.
    tvCastToVecInPlace(&unpackArgs);
    stack.pushVec(unpackArgs.m_data.parr);
    return numParams + 1;
  }

  ArrayIter iter(unpackArgs);
  if (LIKELY(numArgs < numParams)) {
    // Move unpacked values onto the stack to fill the missing non-variadic
    // parameter slots.
    for (auto i = numArgs; iter && (i < numParams); ++i, ++iter) {
      if (UNLIKELY(checkInOutAnnot && func->isInOut(i))) {
        // inout params cannot be bound from an unpacked container.
        throwParamInOutMismatch(func, i);
      }
      auto const from = iter.secondValPlus();
      tvDup(from, *stack.allocTV());
    }

    if (LIKELY(!iter)) {
      // argArray was exhausted, so there are no "extra" arguments but there
      // may be a deficit of non-variadic arguments, and the need to push an
      // empty array for the variadic argument ... that work is left to
      // prepareFuncEntry.
      assertx(numArgs + numUnpackArgs <= numParams);
      return numArgs + numUnpackArgs;
    }
  }

  // there are "extra" arguments; passed as standard arguments prior to the
  // ... unpack operator and/or still remaining in argArray
  assertx(numArgs + numUnpackArgs > numParams);
  assertx(numArgs > numParams || !!iter);

  auto const numNewUnpackArgs = numArgs + numUnpackArgs - numParams;
  VecInit ai(numNewUnpackArgs);
  if (UNLIKELY(numArgs > numParams)) {
    // The arguments are pushed in order, so we should start from the bottom.
    auto ptr = stack.indTV(numArgs - numParams);
    for (auto i = numParams; i < numArgs; ++i) {
      ai.append(*--ptr);
    }
    for (auto i = numParams; i < numArgs; ++i) {
      stack.popTV();
    }
  }
  for (; iter; ++iter) {
    ai.append(iter.secondValPlus());
  }
  auto const ad = ai.create();
  assertx(ad->hasExactlyOneRef());
  // Push the repacked "extra" args without an extra incref — we transfer
  // the single reference created above.
  assertx(ad->size() == numNewUnpackArgs);
  stack.pushArrayLikeNoRc(ad);
  return numParams + 1;
}
// Finish setting up a non-resumed call: point vmStack/vmfp/vmpc at the new
// frame, with the PC at the entry point matching the argument count (so
// missing defaulted params run their init code).
static void prepareFuncEntry(ActRec *ar, uint32_t numArgsInclUnpack) {
  assertx(!isResumed(ar));
  // The stack top must sit exactly below the frame's params plus the
  // optional reified-generics and coeffects cells.
  assertx(
    reinterpret_cast<TypedValue*>(ar) - vmStack().top() ==
      ar->func()->numParams()
      + (ar->func()->hasReifiedGenerics() ? 1U : 0U)
      + (ar->func()->hasCoeffectsLocal() ? 1U : 0U)
  );

  const Func* func = ar->func();

  vmStack().top() = reinterpret_cast<TypedValue*>(ar) - func->numSlotsInFrame();
  vmfp() = ar;
  vmpc() = func->entry() + func->getEntryForNumArgs(numArgsInclUnpack);
  vmJitReturnAddr() = nullptr;
}
static void dispatch();

// Enter the VM at the start of a function call: set up the frame, then
// either jump into the JIT's translated entry or fall back to the
// interpreter dispatch loop.
void enterVMAtFunc(ActRec* enterFnAr, uint32_t numArgsInclUnpack) {
  assertx(enterFnAr);
  assertx(!isResumed(enterFnAr));
  Stats::inc(Stats::VMEnter);

  prepareFuncEntry(enterFnAr, numArgsInclUnpack);
  assertx(vmfp()->func()->contains(vmpc()));

  if (RID().getJit() && !RID().getJitFolding()) {
    jit::enterTC(jit::svcreq::getFuncEntry(enterFnAr->func()));
  } else {
    // funcEntry() returning false means the entry work already completed
    // the call (nothing left to interpret).
    if (!funcEntry()) return;
    dispatch();
  }
}
// Re-enter the VM at the current vmfp()/vmpc() (e.g. after unwinding or a
// reentry), via the JIT resume helper when the JIT is enabled, otherwise
// the interpreter loop.
void enterVMAtCurPC() {
  assertx(vmfp());
  assertx(vmpc());
  assertx(vmfp()->func()->contains(vmpc()));
  Stats::inc(Stats::VMEnter);
  if (RID().getJit()) {
    jit::enterTC(JitResumeAddr::helper(
      jit::tc::ustubs().resumeHelperFromInterp));
  } else {
    dispatch();
  }
}
// Convert a key cell into the StringData name used by the NV-table lookups.
static inline StringData* lookup_name(tv_rval key) {
  return prepareKey(*key);
}

// Look up a global by key; returns null lval if the global does not exist.
// `name` is set to the prepared key string as a side effect.
static inline tv_lval lookup_gbl(ActRec* /*fp*/, StringData*& name,
                                 tv_rval key) {
  name = lookup_name(key);
  assertx(g_context->m_globalNVTable);
  return g_context->m_globalNVTable->lookup(name);
}

// Like lookup_gbl, but defines the global (initialized to null) when it
// does not already exist, so the returned lval is always valid.
static inline tv_lval lookupd_gbl(ActRec* /*fp*/, StringData*& name,
                                  tv_rval key) {
  name = lookup_name(key);
  assertx(g_context->m_globalNVTable);
  auto env = g_context->m_globalNVTable;
  auto val = env->lookup(name);
  if (!val) {
    TypedValue tv;
    tvWriteNull(tv);
    env->set(name, &tv);
    val = env->lookup(name);
  }
  return val;
}
// Look up a static property of `cls` by (string) key, from the calling
// context of `fp`. All results are returned through the out-parameters:
// the resolved value/slot plus visibility, accessibility, constness and
// readonly flags. `visible` is derived from a non-null value pointer.
static inline void lookup_sprop(ActRec* fp,
                                Class* cls,
                                StringData*& name,
                                TypedValue* key,
                                TypedValue*& val,
                                Slot& slot,
                                bool& visible,
                                bool& accessible,
                                bool& constant,
                                bool& readonly,
                                bool ignoreLateInit) {
  name = lookup_name(key);
  auto const ctx = arGetContextClass(fp);

  auto const lookup = ignoreLateInit
    ? cls->getSPropIgnoreLateInit(ctx, name)
    : cls->getSProp(ctx, name);

  val = lookup.val;
  slot = lookup.slot;
  visible = lookup.val != nullptr;
  constant = lookup.constant;
  readonly = lookup.readonly;
  accessible = lookup.accessible;
}
// Resolve a class reference from a cell: a string or lazy-class name is
// loaded (raising an error if unknown), an object yields its runtime class,
// and a class cell is used directly. Any other type is a fatal error.
static inline Class* lookupClsRef(TypedValue* input) {
  Class* class_ = nullptr;
  if (isStringType(input->m_type) || isLazyClassType(input->m_type)) {
    auto const cname = isStringType(input->m_type) ?
      input->m_data.pstr : input->m_data.plazyclass.name();
    class_ = Class::load(cname);
    if (class_ == nullptr) {
      raise_error(Strings::UNKNOWN_CLASS, cname->data());
    }
  } else if (input->m_type == KindOfObject) {
    class_ = input->m_data.pobj->getVMClass();
  } else if (isClassType(input->m_type)) {
    class_ = input->m_data.pclass;
  } else {
    raise_error("Cls: Expected string or object, got %s",
                describe_actual_type(input).c_str());
  }
  return class_;
}
// Debug helper: refcount of a cell, or -1 for non-refcounted types.
static UNUSED int innerCount(TypedValue tv) {
  return isRefcountedType(tv.m_type) ? tvGetCount(tv) : -1;
}
975 * One iop* function exists for every bytecode. They all take a single PC&
976 * argument, which should be left pointing to the next bytecode to execute when
977 * the instruction is complete. Most return void, though a few return a
978 * jit::JitResumeAddr. The ones that return a JitReturnAddr return a true value
979 * to indicate that the caller must resume execution in the TC at the returned
980 * address. This is used to maintain certain invariants about how we get into
981 * and out of VM frames in jitted code; see comments on jitReturnPre() for more
982 * details.
// Nop: does nothing; exists so a bytecode slot can be a harmless placeholder.
OPTBLD_INLINE void iopNop() {
// EntryNop: no-op emitted at function entry points.
OPTBLD_INLINE void iopEntryNop() {
// PopC: pop (and decref) the cell on top of the eval stack.
OPTBLD_INLINE void iopPopC() {
  vmStack().popC();
// PopU: pop an Uninit slot off the eval stack.
OPTBLD_INLINE void iopPopU() {
  vmStack().popU();
// PopU2: drop the Uninit slot just below the top cell, keeping the top value.
OPTBLD_INLINE void iopPopU2() {
  assertx(vmStack().indC(1)->m_type == KindOfUninit);
  *vmStack().indC(1) = *vmStack().topC();
  vmStack().discard();
// PopL: move the top cell into the given local (old local value is released
// by tvMove).
OPTBLD_INLINE void iopPopL(tv_lval to) {
  TypedValue* fr = vmStack().topC();
  tvMove(*fr, to);
  vmStack().discard();
// Dup: duplicate the top cell.
OPTBLD_INLINE void iopDup() {
  vmStack().dup();
// CGetCUNop / UGetCUNop: stack-flavor conversion no-ops.
OPTBLD_INLINE void iopCGetCUNop() {
OPTBLD_INLINE void iopUGetCUNop() {
// Null / NullUninit / True / False: push the corresponding constant cell.
OPTBLD_INLINE void iopNull() {
  vmStack().pushNull();
OPTBLD_INLINE void iopNullUninit() {
  vmStack().pushNullUninit();
OPTBLD_INLINE void iopTrue() {
  vmStack().pushBool(true);
OPTBLD_INLINE void iopFalse() {
  vmStack().pushBool(false);
// File: push the current file path; prefers the function's original filename
// when one is recorded, otherwise the unit's filepath.
OPTBLD_INLINE void iopFile() {
  if (auto const of = vmfp()->func()->originalFilename()) {
    vmStack().pushStaticString(of);
    return;
  auto s = vmfp()->func()->unit()->filepath();
  vmStack().pushStaticString(s);
// Dir: push dirname() of the same path File would push.
OPTBLD_INLINE void iopDir() {
  auto const p = [&] {
    if (auto const of = vmfp()->func()->originalFilename()) {
      return of;
    return vmfp()->func()->unit()->filepath();
  }();
  vmStack().pushStaticString(
    makeStaticString(FileUtil::dirname(StrNR{p}))
// Method: push the fully-qualified name of the current function.
OPTBLD_INLINE void iopMethod() {
  auto s = vmfp()->func()->fullName();
  vmStack().pushStaticString(s);
// FuncCred: push a FunctionCredential object for the current function.
// pushObjectNoRc: newInstance already hands us a reference.
OPTBLD_INLINE void iopFuncCred() {
  vmStack().pushObjectNoRc(
    FunctionCredential::newInstance(vmfp()->func()));
// ClassName: replace a class cell on top of the stack with its name
// (a persistent string); errors on non-class input.
OPTBLD_INLINE void iopClassName() {
  auto const cls = vmStack().topC();
  if (!isClassType(cls->m_type)) {
    raise_error("Attempting to get name of non-class");
  vmStack().replaceC<KindOfPersistentString>(
    cls->m_data.pclass->name()
// LazyClassFromClass: replace a class cell with a lazy-class carrying the
// same name (defers any future re-load of the class).
OPTBLD_INLINE void iopLazyClassFromClass() {
  auto const cls = vmStack().topC();
  if (!isClassType(cls->m_type)) {
    raise_error("Attempting to get name of non-class");
  auto const cname = cls->m_data.pclass->name();
  auto const lclass = LazyClassData::create(cname);
  vmStack().replaceC<KindOfLazyClass>(lclass);
// Int / Double / String: push an immediate literal onto the eval stack.
OPTBLD_INLINE void iopInt(int64_t imm) {
  vmStack().pushInt(imm);
OPTBLD_INLINE void iopDouble(double imm) {
  vmStack().pushDouble(imm);
OPTBLD_INLINE void iopString(const StringData* s) {
  vmStack().pushStaticString(s);
namespace {
// Profile an object's array-like properties for bespoke-layout selection,
// skipped during inline interpretation.
void profileArrLikePropsForInterp(ObjectData* obj) {
  if (g_context->doingInlineInterp()) return;
  bespoke::profileArrLikeProps(obj);
// Possibly swap a vanilla array for the bespoke layout selected for it.
ArrayData* maybeMakeBespokeArray(ArrayData* ad) {
  return bespoke::makeArrayOfSelectedLayout(ad);
// const overload; layout conversion does not change logical contents.
const ArrayData* maybeMakeBespokeArray(const ArrayData* ad) {
  return maybeMakeBespokeArray(const_cast<ArrayData*>(ad));
// In-place variant used after casts: if the array was replaced, the result
// is counted, so flip the type to its refcounted flavor.
void maybeMakeBespokeArrayAfterCast(TypedValue* tv) {
  auto const oldArr = val(tv).parr;
  auto const newArr = maybeMakeBespokeArray(oldArr);
  if (newArr == oldArr) return;
  val(tv).parr = newArr;
  type(tv) = dt_with_rc(type(tv));
// Vec / Dict / Keyset: push a static array-like literal (possibly converted
// to a bespoke layout).
OPTBLD_INLINE void iopVec(const ArrayData* a) {
  assertx(a->isVecType());
  vmStack().pushStaticVec(maybeMakeBespokeArray(a));
OPTBLD_INLINE void iopDict(const ArrayData* a) {
  assertx(a->isDictType());
  vmStack().pushStaticDict(maybeMakeBespokeArray(a));
OPTBLD_INLINE void iopKeyset(const ArrayData* a) {
  assertx(a->isKeysetType());
  vmStack().pushStaticKeyset(maybeMakeBespokeArray(a));
// NewDictArray: push a fresh dict, pre-sized when a capacity hint is given.
OPTBLD_INLINE void iopNewDictArray(uint32_t capacity) {
  auto const ad = capacity ? VanillaDict::MakeReserveDict(capacity)
                           : ArrayData::CreateDict();
  vmStack().pushDictNoRc(maybeMakeBespokeArray(ad));
namespace {
// Build a struct-like array from `ids` (litstr ids naming the keys) and the
// top |ids| cells of the stack, using factory `f`. Discards the consumed
// cells and returns the new array.
template <typename F>
ArrayData* newStructArrayImpl(imm_array<int32_t> ids, F f) {
  auto const n = ids.size;
  assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
  req::vector<const StringData*> names;
  names.reserve(n);
  auto unit = vmfp()->func()->unit();
  for (size_t i = 0; i < n; ++i) {
    auto name = unit->lookupLitstrId(ids[i]);
    names.push_back(name);
  // This constructor moves values, no inc/decref is necessary.
  auto const a = f(n, names.data(), vmStack().topC())->asArrayData();
  vmStack().ndiscard(n);
  return a;
// NewStructDict: pop the keyed values and push the resulting struct dict.
OPTBLD_INLINE void iopNewStructDict(imm_array<int32_t> ids) {
  auto const ad = newStructArrayImpl(ids, VanillaDict::MakeStructDict);
  vmStack().pushDictNoRc(maybeMakeBespokeArray(ad));
// NewVec: pop the top n cells into a new vec.
OPTBLD_INLINE void iopNewVec(uint32_t n) {
  // This constructor moves values, no inc/decref is necessary.
  auto const ad = VanillaVec::MakeVec(n, vmStack().topC());
  vmStack().ndiscard(n);
  vmStack().pushVecNoRc(maybeMakeBespokeArray(ad));
// NewKeysetArray: pop the top n cells into a new keyset.
OPTBLD_INLINE void iopNewKeysetArray(uint32_t n) {
  // This constructor moves values, no inc/decref is necessary.
  auto const ad = VanillaKeyset::MakeSet(n, vmStack().topC());
  vmStack().ndiscard(n);
  vmStack().pushKeysetNoRc(maybeMakeBespokeArray(ad));
// AddElemC: set $2 => $1 into the dict at $3, then pop the key and value.
// Class-typed keys are coerced to their string name first.
// NOTE(review): the error message mentions "array or dict" but the check
// only accepts dicts — confirm whether the message is intentionally broad.
OPTBLD_INLINE void iopAddElemC() {
  TypedValue* c1 = vmStack().topC();
  auto key = tvClassToString(*vmStack().indC(1));
  TypedValue* c3 = vmStack().indC(2);
  if (!tvIsDict(c3)) {
    raise_error("AddElemC: $3 must be an array or dict");
  tvAsVariant(*c3).asArrRef().set(tvAsCVarRef(key), tvAsCVarRef(c1));
  assertx(tvIsPlausible(*c3));
  vmStack().popC();
  vmStack().popC();
// AddNewElemC: append $1 to the vec or keyset at $2, then pop the value.
OPTBLD_INLINE void iopAddNewElemC() {
  TypedValue* c1 = vmStack().topC();
  TypedValue* c2 = vmStack().indC(1);
  if (!tvIsVec(c2) && !tvIsKeyset(c2)) {
    raise_error("AddNewElemC: $2 must be an varray, vec, or keyset");
  tvAsVariant(*c2).asArrRef().append(tvAsCVarRef(c1));
  assertx(tvIsPlausible(*c2));
  vmStack().popC();
// NewCol: push a new empty collection of the given type (Pair is excluded:
// pairs are created fully-populated by NewPair).
OPTBLD_INLINE void iopNewCol(CollectionType cType) {
  assertx(cType != CollectionType::Pair);
  // Incref the collection object during construction.
  auto obj = collections::alloc(cType);
  vmStack().pushObjectNoRc(obj);
// NewPair: pop two cells and push a Pair containing them.
OPTBLD_INLINE void iopNewPair() {
  TypedValue* c1 = vmStack().topC();
  TypedValue* c2 = vmStack().indC(1);
  // elements were pushed onto the stack in the order they should appear
  // in the pair, so the top of the stack should become the second element
  auto pair = collections::allocPair(*c2, *c1);
  // This constructor moves values, no inc/decref is necessary.
  vmStack().ndiscard(2);
  vmStack().pushObjectNoRc(pair);
// ColFromArray: convert the array on top of the stack into a collection of
// the given type. (Imm)Vector requires a vec; the other kinds require a dict.
OPTBLD_INLINE void iopColFromArray(CollectionType cType) {
  assertx(cType != CollectionType::Pair);
  auto const c1 = vmStack().topC();
  if (cType == CollectionType::Vector || cType == CollectionType::ImmVector) {
    if (UNLIKELY(!isVecType(c1->m_type))) {
      raise_error("ColFromArray: $1 must be a Vec when creating an "
                  "(Imm)Vector");
  } else if (UNLIKELY(!isDictType(c1->m_type))) {
    raise_error("ColFromArray: $1 must be a Dict when creating an (Imm)Set "
                "or an (Imm)Map");
  // This constructor reassociates the ArrayData with the collection, so no
  // inc/decref is needed for the array. The collection object itself is
  // increfed.
  auto obj = collections::alloc(cType, c1->m_data.parr);
  vmStack().discard();
  vmStack().pushObjectNoRc(obj);
// CnsE: push the value of a global constant; fatals if it is undefined.
OPTBLD_INLINE void iopCnsE(const StringData* s) {
  auto const cns = Constant::load(s);
  if (type(cns) == KindOfUninit) {
    raise_error("Undefined constant '%s'", s->data());
  auto const c1 = vmStack().allocC();
  tvCopy(cns, *c1);
// ClsCns: replace the class on top of the stack with the named class
// constant's value; errors on a non-class cell or a missing constant.
OPTBLD_INLINE void iopClsCns(const StringData* clsCnsName) {
  auto const clsTV = vmStack().topC();
  if (!isClassType(clsTV->m_type)) {
    raise_error("Attempting class constant access on non-class");
  auto const cls = clsTV->m_data.pclass;
  auto const clsCns = cls->clsCnsGet(clsCnsName);
  if (clsCns.m_type == KindOfUninit) {
    raise_error("Couldn't find constant %s::%s",
                cls->name()->data(), clsCnsName->data());
  tvDup(clsCns, *clsTV);
// ClsCnsD: push a class constant named by immediates (class resolved through
// the unit's named-entity table).
OPTBLD_INLINE void iopClsCnsD(const StringData* clsCnsName, Id classId) {
  const NamedEntityPair& classNamedEntity =
    vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
  auto const clsCns = g_context->lookupClsCns(classNamedEntity.second,
                                       classNamedEntity.first, clsCnsName);
  auto const c1 = vmStack().allocC();
  tvDup(clsCns, *c1);
// ClsCnsL: like ClsCns, but the constant name comes from a local variable,
// which must hold a string. Uses tvSet (releases the old top-of-stack value).
OPTBLD_INLINE void iopClsCnsL(tv_lval local) {
  auto const clsTV = vmStack().topC();
  if (!isClassType(clsTV->m_type)) {
    raise_error("Attempting class constant access on non-class");
  auto const cls = clsTV->m_data.pclass;
  if (!isStringType(type(local))) {
    raise_error("String expected for %s constant", cls->name()->data());
  auto const clsCnsName = val(local).pstr;
  auto const clsCns = cls->clsCnsGet(clsCnsName);
  if (clsCns.m_type == KindOfUninit) {
    raise_error("Couldn't find constant %s::%s",
                cls->name()->data(), clsCnsName->data());
  tvSet(clsCns, *clsTV);
// Convert a value to String for concatenation, emitting a coercion notice at
// the configured level. NOTE: the level is cached in a function-local static,
// so it reflects the option value at first use.
String toStringWithNotice(const Variant& c) {
  static ConvNoticeLevel notice_level =
    flagToConvNoticeLevel(RuntimeOption::EvalNoticeOnCoerceForStrConcat);
  return c.toString(notice_level, s_ConvNoticeReasonConcat.get());
// Concat: replace the two top cells with their string concatenation.
OPTBLD_INLINE void iopConcat() {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const s2 = toStringWithNotice(tvAsVariant(*c2));
  auto const s1 = toStringWithNotice(tvAsCVarRef(*c1));
  tvAsVariant(*c2) = concat(s2, s1);
  assertx(c2->m_data.pstr->checkCount());
  vmStack().popC();
// ConcatN: concatenate the top n (2..4) cells, leaving one string. The
// deepest cell receives the result; the rest are popped at the end.
OPTBLD_INLINE void iopConcatN(uint32_t n) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const s1 = toStringWithNotice(tvAsCVarRef(*c1));
  if (n == 2) {
    auto const s2 = toStringWithNotice(tvAsVariant(*c2));
    tvAsVariant(*c2) = concat(s2, s1);
    assertx(c2->m_data.pstr->checkCount());
  } else if (n == 3) {
    auto const c3 = vmStack().indC(2);
    auto const s3 = toStringWithNotice(tvAsVariant(*c3));
    auto const s2 = toStringWithNotice(tvAsCVarRef(*c2));
    tvAsVariant(*c3) = concat3(s3, s2, s1);
    assertx(c3->m_data.pstr->checkCount());
  } else {
    assertx(n == 4);
    auto const c3 = vmStack().indC(2);
    auto const c4 = vmStack().indC(3);
    auto const s4 = toStringWithNotice(tvAsVariant(*c4));
    auto const s3 = toStringWithNotice(tvAsCVarRef(*c3));
    auto const s2 = toStringWithNotice(tvAsCVarRef(*c2));
    tvAsVariant(*c4) = concat4(s4, s3, s2, s1);
    assertx(c4->m_data.pstr->checkCount());
  // Pop the n-1 consumed inputs; the result stays in the deepest slot.
  for (int i = 1; i < n; ++i) {
    vmStack().popC();
// Not: logical negation of the top cell (after boolean coercion).
OPTBLD_INLINE void iopNot() {
  TypedValue* c1 = vmStack().topC();
  tvAsVariant(*c1) = !tvAsVariant(*c1).toBoolean();
// Shared shape of all binary operators: apply fn($2, $1), store the result
// in the deeper slot (releasing its old value), and pop the top cell.
template<class Fn>
OPTBLD_INLINE void implTvBinOp(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = result;
  vmStack().popC();
// Same, but the result is a bool cell.
template<class Fn>
OPTBLD_INLINE void implTvBinOpBool(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  bool const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfBoolean>(result);
  vmStack().popC();
// Same, but the result is an int cell (used by Cmp).
template<class Fn>
OPTBLD_INLINE void implTvBinOpInt64(Fn fn) {
  auto const c1 = vmStack().topC();
  auto const c2 = vmStack().indC(1);
  auto const result = fn(*c2, *c1);
  tvDecRefGen(c2);
  *c2 = make_tv<KindOfInt64>(result);
  vmStack().popC();
// Arithmetic, bitwise, comparison, and shift bytecodes: thin wrappers that
// dispatch the matching tv* operator through the impl helpers above.
// The *O variants overflow to a different representation instead of wrapping.
OPTBLD_INLINE void iopAdd() {
  implTvBinOp(tvAdd);
OPTBLD_INLINE void iopSub() {
  implTvBinOp(tvSub);
OPTBLD_INLINE void iopMul() {
  implTvBinOp(tvMul);
OPTBLD_INLINE void iopAddO() {
  implTvBinOp(tvAddO);
OPTBLD_INLINE void iopSubO() {
  implTvBinOp(tvSubO);
OPTBLD_INLINE void iopMulO() {
  implTvBinOp(tvMulO);
OPTBLD_INLINE void iopDiv() {
  implTvBinOp(tvDiv);
OPTBLD_INLINE void iopPow() {
  implTvBinOp(tvPow);
OPTBLD_INLINE void iopMod() {
  implTvBinOp(tvMod);
OPTBLD_INLINE void iopBitAnd() {
  implTvBinOp(tvBitAnd);
OPTBLD_INLINE void iopBitOr() {
  implTvBinOp(tvBitOr);
OPTBLD_INLINE void iopBitXor() {
  implTvBinOp(tvBitXor);
OPTBLD_INLINE void iopSame() {
  implTvBinOpBool(tvSame);
OPTBLD_INLINE void iopNSame() {
  implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
    return !tvSame(c1, c2);
OPTBLD_INLINE void iopEq() {
  implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
    return tvEqual(c1, c2);
OPTBLD_INLINE void iopNeq() {
  implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
    return !tvEqual(c1, c2);
OPTBLD_INLINE void iopLt() {
  implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
    return tvLess(c1, c2);
OPTBLD_INLINE void iopLte() {
  implTvBinOpBool(tvLessOrEqual);
OPTBLD_INLINE void iopGt() {
  implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
    return tvGreater(c1, c2);
OPTBLD_INLINE void iopGte() {
  implTvBinOpBool(tvGreaterOrEqual);
// Cmp: three-way comparison producing an int.
OPTBLD_INLINE void iopCmp() {
  implTvBinOpInt64([&] (TypedValue c1, TypedValue c2) {
    return tvCompare(c1, c2);
OPTBLD_INLINE void iopShl() {
  implTvBinOp(tvShl);
OPTBLD_INLINE void iopShr() {
  implTvBinOp(tvShr);
// BitNot: in-place bitwise negation of the top cell.
OPTBLD_INLINE void iopBitNot() {
  tvBitNot(*vmStack().topC());
// Cast* bytecodes: convert the top cell in place to the target type.
OPTBLD_INLINE void iopCastBool() {
  TypedValue* c1 = vmStack().topC();
  tvCastToBooleanInPlace(c1);
OPTBLD_INLINE void iopCastInt() {
  TypedValue* c1 = vmStack().topC();
  tvCastToInt64InPlace(c1);
OPTBLD_INLINE void iopCastDouble() {
  TypedValue* c1 = vmStack().topC();
  tvCastToDoubleInPlace(c1);
OPTBLD_INLINE void iopCastString() {
  TypedValue* c1 = vmStack().topC();
  tvCastToStringInPlace(c1);
// Array-like casts additionally give the result a chance to adopt a bespoke
// layout; already-correct types are left untouched.
OPTBLD_INLINE void iopCastVec() {
  TypedValue* c1 = vmStack().topC();
  if (tvIsVec(c1)) return;
  tvCastToVecInPlace(c1);
  maybeMakeBespokeArrayAfterCast(c1);
OPTBLD_INLINE void iopCastDict() {
  TypedValue* c1 = vmStack().topC();
  if (tvIsDict(c1)) return;
  tvCastToDictInPlace(c1);
  maybeMakeBespokeArrayAfterCast(c1);
OPTBLD_INLINE void iopCastKeyset() {
  TypedValue* c1 = vmStack().topC();
  if (tvIsKeyset(c1)) return;
  tvCastToKeysetInPlace(c1);
  maybeMakeBespokeArrayAfterCast(c1);
// DblAsBits: reinterpret a double's bit pattern as an int (no conversion);
// non-doubles become 0.
OPTBLD_INLINE void iopDblAsBits() {
  auto c = vmStack().topC();
  if (UNLIKELY(!isDoubleType(c->m_type))) {
    vmStack().replaceC<KindOfInt64>(0);
    return;
  c->m_type = KindOfInt64;
// instanceof against a class name: resolves the name through the
// named-entity table and defers to tvInstanceOf. Returns false when the
// name has no cached Class (see comment below).
ALWAYS_INLINE
bool implInstanceOfHelper(const StringData* str1, TypedValue* c2) {
  const NamedEntity* rhs = NamedEntity::get(str1, false);
  // Because of other codepaths, an un-normalized name might enter the
  // table without a Class* so we need to check if it's there.
  if (LIKELY(rhs && rhs->getCachedClass() != nullptr)) {
    return tvInstanceOf(c2, rhs);
  return false;
// InstanceOf: evaluate `$2 instanceof $1` where $1 may be a string name,
// an object (its class is used), or a class pointer. Pops the class operand
// and replaces the value operand with the boolean result.
OPTBLD_INLINE void iopInstanceOf() {
  TypedValue* c1 = vmStack().topC(); // c2 instanceof c1
  TypedValue* c2 = vmStack().indC(1);
  bool r = false;
  if (isStringType(c1->m_type)) {
    r = implInstanceOfHelper(c1->m_data.pstr, c2);
  } else if (c1->m_type == KindOfObject) {
    if (c2->m_type == KindOfObject) {
      ObjectData* lhs = c2->m_data.pobj;
      ObjectData* rhs = c1->m_data.pobj;
      r = lhs->instanceof(rhs->getVMClass());
  } else if (isClassType(c1->m_type)) {
    // TODO (T29639296) Exploit class pointer further
    r = implInstanceOfHelper(c1->m_data.pclass->name(), c2);
  } else {
    raise_error("Class name must be a valid object or a string");
  vmStack().popC();
  vmStack().replaceC<KindOfBoolean>(r);
// InstanceOfD: instanceof against a class named by an immediate id.
OPTBLD_INLINE void iopInstanceOfD(Id id) {
  const NamedEntity* ne = vmfp()->func()->unit()->lookupNamedEntityId(id);
  TypedValue* c1 = vmStack().topC();
  bool r = tvInstanceOf(c1, ne);
  vmStack().replaceC<KindOfBoolean>(r);
// IsLateBoundCls: test the top cell against the frame's late-bound (static)
// class. Errors outside a class scope and inside traits.
OPTBLD_INLINE void iopIsLateBoundCls() {
  auto const cls = frameStaticClass(vmfp());
  if (!cls) {
    raise_error(HPHP::Strings::THIS_OUTSIDE_CLASS);
  if (isTrait(cls)) {
    raise_error("\"is\" and \"as\" operators cannot be used with a trait");
  auto const c1 = vmStack().topC();
  bool r = tvInstanceOf(c1, cls);
  vmStack().replaceC<KindOfBoolean>(r);
namespace {
// Resolve `n` raw type structures (starting at `values`) into one resolved
// type structure. Non-static structures need the declaring and called class
// context, which is pulled from the current VM frame when available.
ArrayData* resolveAndVerifyTypeStructureHelper(
  uint32_t n, const TypedValue* values, bool suppress, bool isOrAsOp) {
  Class* declaringCls = nullptr;
  Class* calledCls = nullptr;
  auto const v = *values;
  isValidTSType(v, true);
  if (typeStructureCouldBeNonStatic(v.m_data.parr)) {
    auto const frame = vmfp();
    if (frame && frame->func()) {
      declaringCls = frame->func()->cls();
      if (declaringCls) {
        calledCls = frame->hasClass()
          ? frame->getClass()
          : frame->getThis()->getVMClass();
  return jit::resolveTypeStructHelper(n, values, declaringCls,
                                      calledCls, suppress, isOrAsOp);
// For is/as expressions: either resolve the type structure on top of the
// stack (attaching a fresh array when resolution produced a new one), or
// just validate it for use unresolved.
ALWAYS_INLINE Array maybeResolveAndErrorOnTypeStructure(
  TypeStructResolveOp op,
  bool suppress
  auto const a = vmStack().topC();
  isValidTSType(*a, true);
  auto const arr = a->m_data.parr;
  if (op == TypeStructResolveOp::Resolve) {
    auto const result = resolveAndVerifyTypeStructureHelper(1, vmStack().topC(),
                                                            suppress, true);
    if (arr == result) return ArrNR(arr);
    return Array::attach(result);
  errorOnIsAsExpressionInvalidTypes(ArrNR(arr), false);
  return ArrNR(arr);
} // namespace
// IsTypeStructC: `$2 is <ts>` — pop the type structure and value, push
// whether the value matches.
OPTBLD_INLINE void iopIsTypeStructC(TypeStructResolveOp op) {
  auto const c = vmStack().indC(1);
  auto const ts = maybeResolveAndErrorOnTypeStructure(op, true);
  auto b = checkTypeStructureMatchesTV(ts, *c);
  vmStack().popC(); // pop c
  vmStack().replaceC<KindOfBoolean>(b);
// ThrowAsTypeStructException: failure path of `as` — must throw; falling
// through indicates a malformed bytecode sequence.
OPTBLD_INLINE void iopThrowAsTypeStructException() {
  auto const c = vmStack().indC(1);
  auto const ts =
    maybeResolveAndErrorOnTypeStructure(TypeStructResolveOp::Resolve, false);
  std::string givenType, expectedType, errorKey;
  if (!checkTypeStructureMatchesTV(ts, *c,
                                   givenType, expectedType, errorKey)) {
    vmStack().popC(); // pop c
    throwTypeStructureDoesNotMatchTVException(
      givenType, expectedType, errorKey);
  always_assert(false && "Invalid bytecode sequence: Instruction must throw");
// CombineAndResolveTypeStruct: fold the top n type structures into one
// resolved structure and push it.
OPTBLD_INLINE void iopCombineAndResolveTypeStruct(uint32_t n) {
  assertx(n != 0);
  auto const resolved =
    resolveAndVerifyTypeStructureHelper(n, vmStack().topC(), false, false);
  vmStack().popC(); // pop the first TS
  vmStack().ndiscard(n-1);
  vmStack().pushArrayLike(resolved);
// RecordReifiedGeneric: record the vec of type structures on top of the
// stack as reified generics and push the (static) recorded list.
OPTBLD_INLINE void iopRecordReifiedGeneric() {
  auto const tsList = vmStack().topC();
  if (!tvIsVec(tsList)) {
    raise_error("Invalid type-structure list in RecordReifiedGeneric");
  // recordReifiedGenericsAndGetTSList decrefs the tsList
  auto const result =
    jit::recordReifiedGenericsAndGetTSList(tsList->m_data.parr);
  vmStack().discard();
  vmStack().pushStaticArrayLike(result);
// CheckReifiedGenericMismatch: verify the vec of generics on the stack
// against the current class's reified generics, then pop it.
OPTBLD_INLINE void iopCheckReifiedGenericMismatch() {
  Class* cls = arGetContextClass(vmfp());
  if (!cls) raise_error("No class scope is active");
  auto const c = vmStack().topC();
  if (!tvIsVec(c)) {
    raise_error("Invalid type-structure list in CheckReifiedGenericMismatch");
  checkClassReifiedGenericMismatch(cls, c->m_data.parr);
  vmStack().popC();
// Print: write the top cell (as a string) to the context output and replace
// it with int(1), matching PHP's print semantics.
OPTBLD_INLINE void iopPrint() {
  TypedValue* c1 = vmStack().topC();
  g_context->write(tvAsVariant(*c1).toString());
  vmStack().replaceC<KindOfInt64>(1);
// Clone: replace the object on top of the stack with its clone; errors on
// non-objects.
OPTBLD_INLINE void iopClone() {
  TypedValue* tv = vmStack().topTV();
  if (tv->m_type != KindOfObject) {
    raise_error("clone called on non-object");
  auto newobj = tv->m_data.pobj->clone();
  vmStack().popTV();
  vmStack().pushObjectNoRc(newobj);
// Exit: implement exit()/die(). An int operand becomes the exit code; any
// other operand is printed and the code stays 0. Unwinds via ExitException.
OPTBLD_INLINE void iopExit() {
  int exitCode = 0;
  TypedValue* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64) {
    exitCode = c1->m_data.num;
  } else {
    g_context->write(tvAsVariant(*c1).toString());
  vmStack().popC();
  vmStack().pushNull();
  throw ExitException(exitCode);
// Fatal: raise a fatal error with the message on top of the stack; the kind
// selects whether the first frame is omitted from the backtrace.
OPTBLD_INLINE void iopFatal(FatalOp kind_char) {
  TypedValue* top = vmStack().topTV();
  std::string msg;
  if (isStringType(top->m_type)) {
    msg = top->m_data.pstr->data();
  } else {
    msg = "Fatal error message not a string";
  vmStack().popTV();
  switch (kind_char) {
  case FatalOp::RuntimeOmitFrame:
    raise_error_without_first_frame(msg);
    break;
  case FatalOp::Runtime:
  case FatalOp::Parse:
    raise_error(msg);
    break;
// Check surprise flags on backward (or self) jumps only, so long-running
// loops observe OOM / timeout requests.
OPTBLD_INLINE void jmpSurpriseCheck(Offset offset) {
  if (offset <= 0 && UNLIKELY(checkSurpriseFlags())) {
    auto const flags = handle_request_surprise();
    // Memory threshold callback should also be fired here
    if (flags & MemThresholdFlag) {
      EventHook::DoMemoryThresholdCallback();
    if (flags & TimedOutFlag) {
      RID().invokeUserTimeoutCallback();
// Jmp: unconditional jump with a surprise check on backward edges.
OPTBLD_INLINE void iopJmp(PC& pc, PC targetpc) {
  jmpSurpriseCheck(targetpc - pc);
  pc = targetpc;
// JmpNS: unconditional jump with No Surprise check.
OPTBLD_INLINE void iopJmpNS(PC& pc, PC targetpc) {
  pc = targetpc;
// Shared body of JmpZ/JmpNZ: pop the top cell and jump on false/true.
// Int and bool cells are tested directly; other types go through full
// boolean coercion.
template<Op op>
OPTBLD_INLINE void jmpOpImpl(PC& pc, PC targetpc) {
  static_assert(op == OpJmpZ || op == OpJmpNZ,
                "jmpOpImpl should only be used by JmpZ and JmpNZ");
  jmpSurpriseCheck(targetpc - pc);
  TypedValue* c1 = vmStack().topC();
  if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
    int64_t n = c1->m_data.num;
    vmStack().popX();
    if (op == OpJmpZ ? n == 0 : n != 0) pc = targetpc;
  } else {
    auto const cond = tvAsCVarRef(*c1).toBoolean();
    vmStack().popC();
    if (op == OpJmpZ ? !cond : cond) pc = targetpc;
OPTBLD_INLINE void iopJmpZ(PC& pc, PC targetpc) {
  jmpOpImpl<OpJmpZ>(pc, targetpc);
OPTBLD_INLINE void iopJmpNZ(PC& pc, PC targetpc) {
  jmpOpImpl<OpJmpNZ>(pc, targetpc);
// Select: pop a condition and two values; keep the value selected by the
// condition (true => the deeper of the two remaining cells).
OPTBLD_INLINE void iopSelect() {
  auto const cond = [&]{
    auto c = vmStack().topC();
    if (c->m_type == KindOfInt64 || c->m_type == KindOfBoolean) {
      auto const val = (bool)c->m_data.num;
      vmStack().popX();
      return val;
    } else {
      auto const val = tvAsCVarRef(*c).toBoolean();
      vmStack().popC();
      return val;
  }();
  if (cond) {
    auto const t = *vmStack().topC();
    vmStack().discard();
    vmStack().replaceC(t);
  } else {
    vmStack().popC();
// Switch: jump through the offset table. Unbounded switches (generator
// resume points) index directly with a trusted int label. Bounded switches
// map [base, base+cases) to table slots and route everything else (including
// non-int values) to the final default entry.
OPTBLD_INLINE
void iopSwitch(PC origpc, PC& pc, SwitchKind kind, int64_t base,
               imm_array<Offset> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 0);
  TypedValue* val = vmStack().topTV();
  if (kind == SwitchKind::Unbounded) {
    assertx(val->m_type == KindOfInt64);
    // Continuation switch: no bounds checking needed
    int64_t label = val->m_data.num;
    vmStack().popX();
    assertx(label >= 0 && label < veclen);
    pc = origpc + jmptab[label];
    return;
  // Bounded switch; the last table entry is the default target. The bound
  // `base + veclen - 2` leaves room for the trailing default slot(s).
  const auto num = val->m_data.num;
  const auto offset =
    !tvIsInt(val) || num < base || num >= (base + veclen - 2)
      ? veclen - 1
      : num - base;
  pc = origpc + jmptab[offset];
  vmStack().discard();
// SSwitch: string switch. Compares the top cell (string, class, or lazy
// class) against each case's literal; the last table entry is the default.
OPTBLD_INLINE
void iopSSwitch(PC origpc, PC& pc, imm_array<StrVecItem> jmptab) {
  auto const veclen = jmptab.size;
  assertx(veclen > 1);
  TypedValue* val = vmStack().topTV();
  if (tvIsString(val) || tvIsClass(val) || tvIsLazyClass(val)) {
    unsigned cases = veclen - 1; // the last vector item is the default case
    Unit* u = vmfp()->func()->unit();
    for (unsigned i = 0; i < cases; ++i) {
      auto item = jmptab[i];
      const StringData* str = u->lookupLitstrId(item.str);
      if (tvEqual(*val, str)) {
        pc = origpc + item.dest;
        vmStack().popC();
        return;
  // default case
  pc = origpc + jmptab[veclen - 1].dest;
  vmStack().popC();
1867 * jitReturnPre and jitReturnPost are used by RetC/V, CreateCont, NativeImpl,
1868 * Yield, and YieldK to perform a few tasks related to interpreting out of a
1869 * frame:
1871 * - If the current frame was entered in the TC and the jit is now off, we
1872 * throw a VMSwitchMode at the beginning of the bytecode to execute the
1873 * call's catch block (if present) before performing the return.
1874 * - If the current frame was entered in the TC and the jit is still on,
1875 * we wait until the end of the bytecode and throw a VMResumeTC, to return to
1876 * our translated caller rather than interpreting back into it.
1877 * - If the current frame was entered by the interpreter but was active when
1878 * the jit called MCGenerator::handleResume() (meaning it's the saved value
1879 * of %rbp in handleResume()'s stack frame), throw a VMResumeTC to reenter
1880 * handleResume(). This is necessary to update the value of %rbp in the TC
1881 * frame, so the unwinder doesn't read from a dead VM frame if something
1882 * throws from the interpreter later on.
namespace {
// Snapshot of the returning frame's linkage, captured before locals are
// freed so jitReturnPost can decide how to resume.
struct JitReturn {
  uint64_t savedRip;  // return address saved in the ActRec
  ActRec* fp;         // the frame being returned from
  ActRec* sfp;        // its caller frame (nullptr at nesting root)
  Offset callOff;     // caller's call offset
// Capture the JitReturn info for fp; must run before the frame is torn down.
OPTBLD_INLINE JitReturn jitReturnPre(ActRec* fp) {
  assertx(fp->m_savedRip);
  return {fp->m_savedRip, fp, fp->sfp(), fp->callOffset()};
// Decide how execution continues after an interpreted return: resume in the
// TC via the saved rip, bounce through the resume helper (see long comment
// below), or keep interpreting.
OPTBLD_INLINE JitResumeAddr jitReturnPost(JitReturn retInfo) {
  assertx(isCallToExit(retInfo.savedRip) == (retInfo.sfp == nullptr));
  assertx(isCallToExit(retInfo.savedRip) == (vmfp() == nullptr));
  if (!isReturnHelper(retInfo.savedRip)) {
    // This frame is either the first frame in this VM nesting level, or it was
    // called by a translated code so we can't interpret out of it. Either way,
    // use the saved rip, which will either use the callToExit helper to exit
    // the TC, or resume in the TC right after the call to us. This situation
    // most commonly happens when we interpOne a RetC due to some weird case.
    assertx(isCallToExit(retInfo.savedRip) || RID().getJit());
    return JitResumeAddr::ret(TCA(retInfo.savedRip));
  // Consider a situation with a Hack function f() that calls another function
  // g(). If the call is interpreted, then we spend some time in the TC inside
  // g(), then eventually end in dispatchBB() (called by
  // MCGenerator::handleResume()) for g()'s RetC, the logic here kicks in.
  //
  // g()'s VM frame was in %rbp when the TC called handleResume(), so it's
  // saved somewhere in handleResume()'s stack frame. If we return out of that
  // frame and keep going in the interpreter, that saved %rbp will be pointing
  // to a garbage VM frame. This is a problem if something needs to throw an
  // exception up through handleResume() and the TC frames above it, since the
  // C++ unwinder will attempt to treat parts of the popped VM frame as
  // pointers and segfault.
  //
  // To avoid running with this dangling saved %rbp a few frames up, we
  // immediately throw an exception that is "caught" by the TC frame that
  // called handleResume(). We resume execution in the TC which reloads the new
  // vmfp() into %rbp, then handleResume() is called again, this time with a
  // live VM frame in %rbp.
  if (vmJitCalledFrame() == retInfo.fp) {
    FTRACE(1, "Returning from frame {}; resuming", vmJitCalledFrame());
    return JitResumeAddr::helper(jit::tc::ustubs().resumeHelperFromInterp);
  // This frame was called from the interpreter, so it's ok to also return
  // using the interpreter.
  return JitResumeAddr::none();
// Point the VM at the caller frame and the instruction after the call;
// a null sfp means we returned out of the outermost interpreted frame.
OPTBLD_INLINE void returnToCaller(PC& pc, ActRec* sfp, Offset callOff) {
  vmfp() = sfp;
  pc = LIKELY(sfp != nullptr)
    ? skipCall(sfp->func()->entry() + callOff)
    : nullptr;
// Shared implementation of RetC/RetCSuspended: tear down the current frame,
// deliver the return value (wrapping it for async callees as needed), and
// hand control back to the caller. `suspended` means the callee is an
// eagerly-executed async function whose value is already a wait handle.
template <bool suspended>
OPTBLD_INLINE JitResumeAddr ret(PC& pc) {
  assertx(!suspended || vmfp()->func()->isAsyncFunction());
  assertx(!suspended || !isResumed(vmfp()));
  // Grab info from callee's ActRec.
  auto const fp = vmfp();
  auto const func = fp->func();
  auto const sfp = fp->sfp();
  auto const jitReturn = jitReturnPre(fp);
  // Get the return value.
  TypedValue retval = *vmStack().topTV();
  vmStack().discard();
  assertx(
    !suspended || (tvIsObject(retval) && retval.m_data.pobj->isWaitHandle())
  // Free $this and local variables. Calls FunctionReturn hook. The return
  // value must be removed from the stack, or the unwinder would try to free it
  // if the hook throws---but the event hook routine decrefs the return value
  // in that case if necessary.
  fp->setLocalsDecRefd();
  frame_free_locals_inl(
    func->numLocals(),
    &retval,
    EventHook::Source::Interpreter
  if (LIKELY(!isResumed(fp))) {
    // If in an eagerly executed async function, wrap the return value into
    // succeeded StaticWaitHandle. Async eager return requests are currently
    // not respected, as we don't have a way to obtain the async eager offset.
    if (UNLIKELY(func->isAsyncFunction()) && !suspended) {
      auto const& retvalCell = *tvAssertPlausible(&retval);
      // Heads up that we're assuming CreateSucceeded can't throw, or we won't
      // decref the return value. (It can't right now.)
      auto const waitHandle = c_StaticWaitHandle::CreateSucceeded(retvalCell);
      tvCopy(make_tv<KindOfObject>(waitHandle), retval);
    // Free ActRec and store the return value.
    vmStack().ndiscard(func->numSlotsInFrame());
    vmStack().ret();
    *vmStack().topTV() = retval;
    assertx(vmStack().topTV() == fp->retSlot());
    // In case async eager return was requested by the caller, pretend that
    // we did not finish eagerly as we already boxed the value.
    vmStack().topTV()->m_aux.u_asyncEagerReturnFlag = 0;
  } else if (func->isAsyncFunction()) {
    // Mark the async function as succeeded and store the return value.
    assertx(!sfp);
    auto wh = frame_afwh(fp);
    wh->ret(retval);
    decRefObj(wh);
  } else if (func->isAsyncGenerator()) {
    // Mark the async generator as finished.
    assertx(isNullType(retval.m_type));
    auto const gen = frame_async_generator(fp);
    auto const eagerResult = gen->ret();
    if (eagerResult) {
      // Eager execution => return StaticWaitHandle.
      assertx(sfp);
      vmStack().pushObjectNoRc(eagerResult);
    } else {
      // Resumed execution => return control to the scheduler.
      assertx(!sfp);
  } else if (func->isNonAsyncGenerator()) {
    // Mark the generator as finished and store the return value.
    frame_generator(fp)->ret(retval);
    // Push return value of next()/send()/raise().
    vmStack().pushNull();
  } else {
    not_reached();
  // Return control to the caller.
  returnToCaller(pc, sfp, jitReturn.callOff);
  return jitReturnPost(jitReturn);
// RetC: ordinary return of the top cell.
OPTBLD_INLINE JitResumeAddr iopRetC(PC& pc) {
  return ret<false>(pc);
// RetCSuspended: return from an eagerly-executed async function whose
// return value is already a wait handle.
OPTBLD_INLINE JitResumeAddr iopRetCSuspended(PC& pc) {
  assertx(vmfp()->func()->isAsyncFunction());
  assertx(!isResumed(vmfp()));
  return ret<true>(pc);
// RetM: return `numRet` values. Values are saved top-down into `retvals`
// (so retvals[0] is the actual return value), the frame is torn down, and
// the values are laid back out with the primary return value on top.
OPTBLD_INLINE JitResumeAddr iopRetM(PC& pc, uint32_t numRet) {
  auto const jitReturn = jitReturnPre(vmfp());
  req::vector<TypedValue> retvals;
  retvals.reserve(numRet);
  for (int i = numRet - 1; i >= 0; i--) {
    retvals.push_back(*vmStack().indC(i));
  vmStack().ndiscard(numRet);
  // Free $this and local variables. Calls FunctionReturn hook. The return
  // value must be removed from the stack, or the unwinder would try to free it
  // if the hook throws---but the event hook routine decrefs the return value
  // in that case if necessary.
  frame_free_locals_inl(
    vmfp(),
    vmfp()->func()->numLocals(),
    &retvals[0],
    EventHook::Source::Interpreter
  assertx(!vmfp()->func()->isGenerator() && !vmfp()->func()->isAsync());
  // Grab caller info from ActRec.
  ActRec* sfp = vmfp()->sfp();
  Offset callOff = vmfp()->callOffset();
  // Free ActRec and store the return value.
  vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
  vmStack().ret();
  // Discard scratch space for return values allocated for multi return FCall
  vmStack().ndiscard(numRet - 1);
  *vmStack().topTV() = retvals[1];
  for (int i = 2; i < numRet; i++) {
    *vmStack().allocTV() = retvals[i];
  // Store the actual return value at the top of the stack
  *vmStack().allocTV() = retvals[0];
  // Return control to the caller.
  returnToCaller(pc, sfp, callOff);
  return jitReturnPost(jitReturn);
2095 OPTBLD_INLINE void iopThrow(PC&) {
2096 TypedValue* c1 = vmStack().topC();
2097 if (c1->m_type != KindOfObject ||
2098 !c1->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
2099 raise_error("Exceptions must implement the Throwable interface.");
2101 auto obj = Object::attach(c1->m_data.pobj);
2102 vmStack().discard();
2103 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
2104 throw req::root<Object>(std::move(obj));
2107 OPTBLD_INLINE void iopThrowNonExhaustiveSwitch() {
2108 SystemLib::throwRuntimeExceptionObject(String(Strings::NONEXHAUSTIVE_SWITCH));
2111 OPTBLD_INLINE void iopRaiseClassStringConversionWarning() {
2112 if (RuntimeOption::EvalRaiseClassConversionWarning) {
2113 raise_class_to_string_conversion_warning();
2117 OPTBLD_INLINE void iopResolveClass(Id id) {
2118 auto const cname = vmfp()->unit()->lookupLitstrId(id);
2119 auto const class_ = Class::load(cname);
2120 // TODO (T61651936): Disallow implicit conversion to string
2121 if (class_ == nullptr) {
2122 if (RuntimeOption::EvalRaiseClassConversionWarning) {
2123 raise_class_to_string_conversion_warning();
2125 vmStack().pushStaticString(cname);
2127 else {
2128 vmStack().pushClass(class_);
2132 OPTBLD_INLINE void iopLazyClass(Id id) {
2133 auto const cname = vmfp()->unit()->lookupLitstrId(id);
2134 auto const lclass = LazyClassData::create(cname);
2135 vmStack().pushLazyClass(lclass);
2138 OPTBLD_INLINE void iopClassGetC() {
2139 auto const cell = vmStack().topC();
2140 if (isStringType(cell->m_type)) {
2141 raise_str_to_class_notice(cell->m_data.pstr);
2143 auto const cls = lookupClsRef(cell);
2144 vmStack().popC();
2145 vmStack().pushClass(cls);
2148 OPTBLD_INLINE void iopClassGetTS() {
2149 auto const cell = vmStack().topC();
2150 if (!tvIsDict(cell)) {
2151 raise_error("Reified type must be a type structure");
2153 auto const ts = cell->m_data.parr;
2154 auto const classname_field = ts->get(s_classname.get());
2155 if (!classname_field.is_init()) {
2156 raise_error("You cannot create a new instance of this type as "
2157 "it is not a class");
2159 assertx(isStringType(classname_field.type()));
2160 auto const name = classname_field.val().pstr;
2161 auto const generics_field = ts->get(s_generic_types.get());
2162 ArrayData* reified_types = nullptr;
2163 if (generics_field.is_init()) {
2164 reified_types = generics_field.val().parr;
2165 auto const mangledTypeName =
2166 makeStaticString(mangleReifiedGenericsName(reified_types));
2167 reified_types->incRefCount();
2168 reified_types = addToReifiedGenericsTable(mangledTypeName, reified_types);
2170 auto const cls = Class::load(name);
2171 if (cls == nullptr) {
2172 raise_error(Strings::UNKNOWN_CLASS, name->data());
2175 vmStack().popC();
2176 vmStack().pushClass(cls);
2177 if (reified_types) {
2178 vmStack().pushStaticArrayLike(reified_types);
2179 } else {
2180 vmStack().pushNull();
2184 static void raise_undefined_local(ActRec* fp, LocalName pind) {
2185 assertx(pind < fp->func()->numNamedLocals());
2186 assertx(fp->func()->localVarName(pind));
2187 if (debug) {
2188 auto vm = &*g_context;
2189 always_assert_flog(
2190 pind != kInvalidLocalName,
2191 "HHBBC incorrectly removed name info for a local in {}:{}",
2192 vm->getContainingFileName()->data(),
2193 vm->getLine()
2196 SystemLib::throwUndefinedVariableExceptionObject(
2197 folly::sformat("Undefined variable: {}",
2198 fp->func()->localVarName(pind)->data()));
2201 static inline void cgetl_inner_body(tv_rval fr, TypedValue* to) {
2202 assertx(type(fr) != KindOfUninit);
2203 tvDup(*fr, *to);
2206 OPTBLD_INLINE void cgetl_body(ActRec* fp,
2207 tv_rval fr,
2208 TypedValue* to,
2209 LocalName lname,
2210 bool warn) {
2211 if (type(fr) == KindOfUninit) {
2212 // `to' is uninitialized here, so we need to tvWriteNull before
2213 // possibly causing stack unwinding.
2214 tvWriteNull(*to);
2215 if (warn) raise_undefined_local(fp, lname);
2216 } else {
2217 cgetl_inner_body(fr, to);
2221 OPTBLD_INLINE void iopCGetL(named_local_var fr) {
2222 TypedValue* to = vmStack().allocC();
2223 cgetl_body(vmfp(), fr.lval, to, fr.name, true);
2226 OPTBLD_INLINE void iopCGetQuietL(tv_lval fr) {
2227 TypedValue* to = vmStack().allocC();
2228 cgetl_body(vmfp(), fr, to, kInvalidLocalName, false);
2231 OPTBLD_INLINE void iopCUGetL(tv_lval fr) {
2232 auto to = vmStack().allocTV();
2233 tvDup(*fr, *to);
2236 OPTBLD_INLINE void iopCGetL2(named_local_var fr) {
2237 TypedValue* oldTop = vmStack().topTV();
2238 TypedValue* newTop = vmStack().allocTV();
2239 memcpy(newTop, oldTop, sizeof *newTop);
2240 TypedValue* to = oldTop;
2241 cgetl_body(vmfp(), fr.lval, to, fr.name, true);
2244 OPTBLD_INLINE void iopPushL(tv_lval locVal) {
2245 assertx(type(locVal) != KindOfUninit);
2246 TypedValue* dest = vmStack().allocTV();
2247 *dest = *locVal;
2248 type(locVal) = KindOfUninit;
2251 OPTBLD_INLINE void iopCGetG() {
2252 StringData* name;
2253 TypedValue* to = vmStack().topTV();
2254 auto const fr = lookup_gbl(vmfp(), name, to);
2255 SCOPE_EXIT { decRefStr(name); };
2256 tvDecRefGen(to);
2257 if (!fr || type(fr) == KindOfUninit) {
2258 tvWriteNull(*to);
2259 } else {
2260 cgetl_inner_body(fr, to);
2264 struct SpropState {
2265 SpropState(Stack&, bool ignoreLateInit);
2266 ~SpropState();
2267 StringData* name;
2268 Class* cls;
2269 TypedValue* output;
2270 TypedValue* val;
2271 TypedValue oldNameCell;
2272 Slot slot;
2273 bool visible;
2274 bool accessible;
2275 bool constant;
2276 bool readonly;
2277 Stack& vmstack;
2280 SpropState::SpropState(Stack& vmstack, bool ignoreLateInit) : vmstack{vmstack} {
2281 auto const clsCell = vmstack.topC();
2282 auto const nameCell = output = vmstack.indTV(1);
2283 if (!isClassType(clsCell->m_type)) {
2284 raise_error("SpropState: expected class");
2286 cls = clsCell->m_data.pclass;
2287 lookup_sprop(vmfp(), cls, name, nameCell, val,
2288 slot, visible, accessible, constant, readonly, ignoreLateInit);
2289 oldNameCell = *nameCell;
2292 SpropState::~SpropState() {
2293 vmstack.discard();
2294 decRefStr(name);
2295 tvDecRefGen(oldNameCell);
2298 OPTBLD_INLINE void iopCGetS(ReadonlyOp op) {
2299 SpropState ss(vmStack(), false);
2300 if (!(ss.visible && ss.accessible)) {
2301 raise_error("Invalid static property access: %s::%s",
2302 ss.cls->name()->data(),
2303 ss.name->data());
2305 if (ss.readonly && op == ReadonlyOp::Mutable) {
2306 throw_must_be_enclosed_in_readonly(
2307 ss.cls->name()->data(), ss.name->data()
2310 tvDup(*ss.val, *ss.output);
2313 static inline void baseGImpl(tv_rval key, MOpMode mode) {
2314 auto& mstate = vmMInstrState();
2315 StringData* name;
2316 mstate.roProp = false;
2318 auto const baseVal = (mode == MOpMode::Define)
2319 ? lookupd_gbl(vmfp(), name, key)
2320 : lookup_gbl(vmfp(), name, key);
2321 SCOPE_EXIT { decRefStr(name); };
2323 if (!baseVal) {
2324 assertx(mode != MOpMode::Define);
2325 if (mode == MOpMode::Warn) {
2326 SystemLib::throwOutOfBoundsExceptionObject(
2327 folly::sformat("Undefined index: {}", name)
2330 tvWriteNull(mstate.tvTempBase);
2331 mstate.base = &mstate.tvTempBase;
2332 return;
2335 mstate.base = baseVal;
2338 OPTBLD_INLINE void iopBaseGC(uint32_t idx, MOpMode mode) {
2339 baseGImpl(vmStack().indTV(idx), mode);
2342 OPTBLD_INLINE void iopBaseGL(tv_lval loc, MOpMode mode) {
2343 baseGImpl(loc, mode);
2346 OPTBLD_INLINE void iopBaseSC(uint32_t keyIdx,
2347 uint32_t clsIdx,
2348 MOpMode mode,
2349 ReadonlyOp op) {
2350 auto& mstate = vmMInstrState();
2351 auto const clsCell = vmStack().indC(clsIdx);
2352 auto const key = vmStack().indTV(keyIdx);
2354 if (!isClassType(clsCell->m_type)) {
2355 raise_error("Attempting to obtain static base on non-class");
2357 auto const class_ = clsCell->m_data.pclass;
2359 auto const name = lookup_name(key);
2360 SCOPE_EXIT { decRefStr(name); };
2361 auto const lookup = class_->getSProp(arGetContextClass(vmfp()), name);
2362 if (!lookup.val || !lookup.accessible) {
2363 raise_error("Invalid static property access: %s::%s",
2364 class_->name()->data(),
2365 name->data());
2367 assertx(mode != MOpMode::InOut);
2368 auto const writeMode = mode == MOpMode::Define || mode == MOpMode::Unset;
2370 if (lookup.constant && writeMode) {
2371 throw_cannot_modify_static_const_prop(class_->name()->data(),
2372 name->data());
2375 mstate.roProp = false;
2376 checkReadonly(lookup.val, class_, name, lookup.readonly, op, writeMode);
2377 mstate.base = tv_lval(lookup.val);
2380 OPTBLD_INLINE void baseLImpl(named_local_var loc, MOpMode mode, ReadonlyOp op) {
2381 auto& mstate = vmMInstrState();
2382 auto const local = loc.lval;
2383 if (mode == MOpMode::Warn && type(local) == KindOfUninit) {
2384 raise_undefined_local(vmfp(), loc.name);
2387 mstate.roProp = false;
2388 if (readonlyLocalShouldThrow(*local, op)) {
2389 assertx(loc.name < vmfp()->func()->numNamedLocals());
2390 assertx(vmfp()->func()->localVarName(loc.name));
2391 auto const name = vmfp()->func()->localVarName(loc.name);
2392 throw_local_must_be_value_type(name->data());
2394 mstate.base = local;
2397 OPTBLD_INLINE void iopBaseL(named_local_var loc, MOpMode mode, ReadonlyOp op) {
2398 baseLImpl(loc, mode, op);
2401 OPTBLD_INLINE void iopBaseC(uint32_t idx, MOpMode) {
2402 auto& mstate = vmMInstrState();
2403 mstate.base = vmStack().indC(idx);
2404 mstate.roProp = false;
2407 OPTBLD_INLINE void iopBaseH() {
2408 auto& mstate = vmMInstrState();
2409 mstate.tvTempBase = make_tv<KindOfObject>(vmfp()->getThis());
2410 mstate.base = &mstate.tvTempBase;
2411 mstate.roProp = false;
2414 static OPTBLD_INLINE void propDispatch(MOpMode mode, TypedValue key, ReadonlyOp op) {
2415 auto& mstate = vmMInstrState();
2416 auto ctx = arGetContextClass(vmfp());
2418 mstate.base = [&]{
2419 switch (mode) {
2420 case MOpMode::None:
2421 return Prop<MOpMode::None>(mstate.tvTempBase, ctx, *mstate.base, key, op);
2422 case MOpMode::Warn:
2423 return Prop<MOpMode::Warn>(mstate.tvTempBase, ctx, *mstate.base, key, op);
2424 case MOpMode::Define:
2425 return Prop<MOpMode::Define,KeyType::Any>(
2426 mstate.tvTempBase, ctx, *mstate.base, key, op
2428 case MOpMode::Unset:
2429 return Prop<MOpMode::Unset>(mstate.tvTempBase, ctx, *mstate.base, key, op);
2430 case MOpMode::InOut:
2431 always_assert_flog(false, "MOpMode::InOut can only occur on Elem");
2433 always_assert(false);
2434 }();
2437 static OPTBLD_INLINE void propQDispatch(MOpMode mode, TypedValue key, ReadonlyOp op) {
2438 auto& mstate = vmMInstrState();
2439 auto ctx = arGetContextClass(vmfp());
2441 assertx(mode == MOpMode::None || mode == MOpMode::Warn);
2442 assertx(key.m_type == KindOfPersistentString);
2443 if (mode == MOpMode::None) {
2444 mstate.base = nullSafeProp<MOpMode::None>(mstate.tvTempBase, ctx,
2445 *mstate.base, key.m_data.pstr, op);
2446 } else {
2447 mstate.base = nullSafeProp<MOpMode::Warn>(mstate.tvTempBase, ctx,
2448 *mstate.base, key.m_data.pstr, op);
2452 static OPTBLD_INLINE
2453 void elemDispatch(MOpMode mode, TypedValue key) {
2454 auto& mstate = vmMInstrState();
2455 auto const b = mstate.base;
2457 auto const baseValueToLval = [&](TypedValue base) {
2458 mstate.tvTempBase = base;
2459 return tv_lval { &mstate.tvTempBase };
2462 auto const checkDimForReadonly = [&](DataType dt) {
2463 if (mstate.roProp && dt == KindOfObject) {
2464 throw_cannot_modify_readonly_collection();
2468 mstate.base = [&]{
2469 switch (mode) {
2470 case MOpMode::None:
2471 return baseValueToLval(Elem<MOpMode::None>(*b, key));
2472 case MOpMode::Warn:
2473 return baseValueToLval(Elem<MOpMode::Warn>(*b, key));
2474 case MOpMode::InOut:
2475 return baseValueToLval(Elem<MOpMode::InOut>(*b, key));
2476 case MOpMode::Define: {
2477 auto const result = ElemD(b, key);
2478 checkDimForReadonly(result.type());
2479 return result;
2481 case MOpMode::Unset: {
2482 auto const result = ElemU(b, key);
2483 checkDimForReadonly(result.type());
2484 return result;
2487 always_assert(false);
2488 }();
2491 static inline TypedValue key_tv(MemberKey key) {
2492 switch (key.mcode) {
2493 case MW:
2494 return TypedValue{};
2495 case MEL: case MPL: {
2496 auto const local = frame_local(vmfp(), key.local.id);
2497 if (type(local) == KindOfUninit) {
2498 raise_undefined_local(vmfp(), key.local.name);
2499 return make_tv<KindOfNull>();
2501 return tvClassToString(*local);
2503 case MEC: case MPC:
2504 return tvClassToString(*vmStack().indTV(key.iva));
2505 case MEI:
2506 return make_tv<KindOfInt64>(key.int64);
2507 case MET: case MPT: case MQT:
2508 return make_tv<KindOfPersistentString>(key.litstr);
2510 not_reached();
2513 static OPTBLD_INLINE void dimDispatch(MOpMode mode, MemberKey mk) {
2514 auto const key = key_tv(mk);
2515 if (mk.mcode == MQT) {
2516 propQDispatch(mode, key, mk.rop);
2517 } else if (mcodeIsProp(mk.mcode)) {
2518 propDispatch(mode, key, mk.rop);
2519 } else if (mcodeIsElem(mk.mcode)) {
2520 elemDispatch(mode, key);
2521 } else {
2522 if (mode == MOpMode::Warn) raise_error("Cannot use [] for reading");
2523 auto& mstate = vmMInstrState();
2524 mstate.base = NewElem(mstate.base);
2528 OPTBLD_INLINE void iopDim(MOpMode mode, MemberKey mk) {
2529 dimDispatch(mode, mk);
2532 static OPTBLD_INLINE void mFinal(MInstrState& mstate,
2533 int32_t nDiscard,
2534 Optional<TypedValue> result) {
2535 auto& stack = vmStack();
2536 for (auto i = 0; i < nDiscard; ++i) stack.popTV();
2537 if (result) tvCopy(*result, *stack.allocTV());
2540 static OPTBLD_INLINE
2541 void queryMImpl(MemberKey mk, int32_t nDiscard, QueryMOp op) {
2542 auto& mstate = vmMInstrState();
2543 TypedValue result;
2544 switch (op) {
2545 case QueryMOp::InOut:
2546 always_assert_flog(
2547 mcodeIsElem(mk.mcode), "QueryM InOut is only compatible with Elem"
2549 // fallthrough
2550 case QueryMOp::CGet:
2551 case QueryMOp::CGetQuiet:
2552 dimDispatch(getQueryMOpMode(op), mk);
2553 tvDup(*mstate.base, result);
2554 break;
2556 case QueryMOp::Isset:
2557 result.m_type = KindOfBoolean;
2558 auto const key = key_tv(mk);
2559 if (mcodeIsProp(mk.mcode)) {
2560 auto const ctx = arGetContextClass(vmfp());
2561 result.m_data.num = IssetProp(ctx, *mstate.base, key);
2562 } else {
2563 assertx(mcodeIsElem(mk.mcode));
2564 result.m_data.num = IssetElem(*mstate.base, key);
2566 break;
2568 mFinal(mstate, nDiscard, result);
2571 OPTBLD_INLINE void iopQueryM(uint32_t nDiscard, QueryMOp subop, MemberKey mk) {
2572 queryMImpl(mk, nDiscard, subop);
2575 OPTBLD_INLINE void iopSetM(uint32_t nDiscard, MemberKey mk) {
2576 auto& mstate = vmMInstrState();
2577 auto const topC = vmStack().topC();
2579 if (mk.mcode == MW) {
2580 SetNewElem<true>(mstate.base, topC);
2581 } else {
2582 auto const key = key_tv(mk);
2583 if (mcodeIsElem(mk.mcode)) {
2584 auto const result = SetElem<true>(mstate.base, key, topC);
2585 if (result) {
2586 tvDecRefGen(topC);
2587 topC->m_type = KindOfString;
2588 topC->m_data.pstr = result;
2590 } else {
2591 auto const ctx = arGetContextClass(vmfp());
2592 try {
2593 SetProp(ctx, *mstate.base, key, *topC, mk.rop);
2594 } catch (const InvalidSetMException& exn) {
2595 assertx(!isRefcountedType(type(exn.tv())));
2596 vmStack().popC();
2597 mFinal(mstate, nDiscard, exn.tv());
2598 return;
2603 auto const result = *topC;
2604 vmStack().discard();
2605 mFinal(mstate, nDiscard, result);
2608 OPTBLD_INLINE void iopSetRangeM( uint32_t nDiscard, uint32_t size, SetRangeOp op) {
2609 auto& mstate = vmMInstrState();
2610 auto const count = tvCastToInt64(*vmStack().indC(0));
2611 auto const src = *vmStack().indC(1);
2612 auto const offset = tvCastToInt64(*vmStack().indC(2));
2614 if (op == SetRangeOp::Forward) {
2615 SetRange<false>(mstate.base, offset, src, count, size);
2616 } else {
2617 SetRange<true>(mstate.base, offset, src, count, size);
2620 mFinal(mstate, nDiscard + 3, std::nullopt);
2623 OPTBLD_INLINE void iopIncDecM(uint32_t nDiscard, IncDecOp subop, MemberKey mk) {
2624 auto const key = key_tv(mk);
2626 auto& mstate = vmMInstrState();
2627 auto const result = [&]{
2628 if (mcodeIsProp(mk.mcode)) {
2629 return IncDecProp(arGetContextClass(vmfp()), subop, *mstate.base, key);
2630 } else if (mcodeIsElem(mk.mcode)) {
2631 return IncDecElem(subop, mstate.base, key);
2632 } else {
2633 return IncDecNewElem(subop, mstate.base);
2635 }();
2637 mFinal(mstate, nDiscard, result);
2640 OPTBLD_INLINE void iopSetOpM(uint32_t nDiscard, SetOpOp subop, MemberKey mk) {
2641 auto const key = key_tv(mk);
2642 auto const rhs = vmStack().topC();
2644 auto& mstate = vmMInstrState();
2645 auto const result = [&]{
2646 if (mcodeIsProp(mk.mcode)) {
2647 return *SetOpProp(mstate.tvTempBase, arGetContextClass(vmfp()),
2648 subop, *mstate.base, key, rhs);
2649 } else if (mcodeIsElem(mk.mcode)) {
2650 return SetOpElem(subop, mstate.base, key, rhs);
2651 } else {
2652 return SetOpNewElem(subop, mstate.base, rhs);
2654 }();
2656 vmStack().popC();
2657 tvIncRefGen(result);
2658 mFinal(mstate, nDiscard, result);
2661 OPTBLD_INLINE void iopUnsetM(uint32_t nDiscard, MemberKey mk) {
2662 auto const key = key_tv(mk);
2664 auto& mstate = vmMInstrState();
2665 if (mcodeIsProp(mk.mcode)) {
2666 UnsetProp(arGetContextClass(vmfp()), *mstate.base, key);
2667 } else {
2668 assertx(mcodeIsElem(mk.mcode));
2669 UnsetElem(mstate.base, key);
2672 mFinal(mstate, nDiscard, std::nullopt);
2675 namespace {
2677 inline void checkThis(ActRec* fp) {
2678 if (!fp->func()->cls() || !fp->hasThis()) {
2679 raise_error(Strings::FATAL_NULL_THIS);
2683 OPTBLD_INLINE const TypedValue* memoGetImpl(LocalRange keys) {
2684 auto const fp = vmfp();
2685 auto const func = fp->func();
2686 assertx(func->isMemoizeWrapper());
2687 assertx(keys.first + keys.count <= func->numLocals());
2689 for (auto i = 0; i < keys.count; ++i) {
2690 auto const key = frame_local(fp, keys.first + i);
2691 if (!isIntType(type(key)) && !isStringType(type(key))) {
2692 raise_error("Memoization keys can only be ints or strings");
2696 auto const c = [&] () -> const TypedValue* {
2697 if (!func->isMethod() || func->isStatic()) {
2698 auto const lsbCls =
2699 func->isMemoizeWrapperLSB() ? fp->getClass() : nullptr;
2700 if (keys.count > 0) {
2701 auto cache =
2702 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
2703 : rds::bindStaticMemoCache(func);
2704 if (!cache.isInit()) return nullptr;
2705 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2706 if (auto getter = memoCacheGetForKeyCount(keys.count)) {
2707 return getter(*cache, keysBegin);
2709 return memoCacheGetGeneric(
2710 *cache,
2711 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2712 keysBegin
2716 auto cache =
2717 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
2718 : rds::bindStaticMemoValue(func);
2719 return cache.isInit() ? cache.get() : nullptr;
2722 checkThis(fp);
2723 auto const this_ = fp->getThis();
2724 auto const cls = func->cls();
2725 assertx(this_->instanceof(cls));
2726 assertx(cls->hasMemoSlots());
2728 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
2730 auto const slot = UNLIKELY(this_->hasNativeData())
2731 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
2732 : this_->memoSlot(memoInfo.first);
2734 if (keys.count == 0 && !memoInfo.second) {
2735 auto const val = slot->getValue();
2736 return val->m_type != KindOfUninit ? val : nullptr;
2739 auto const cache = slot->getCache();
2740 if (!cache) return nullptr;
2742 if (memoInfo.second) {
2743 if (keys.count == 0) {
2744 return memoCacheGetSharedOnly(
2745 cache,
2746 makeSharedOnlyKey(func->getFuncId())
2749 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2750 if (auto const getter = sharedMemoCacheGetForKeyCount(keys.count)) {
2751 return getter(cache, func->getFuncId(), keysBegin);
2753 return memoCacheGetGeneric(
2754 cache,
2755 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2756 keysBegin
2760 assertx(keys.count > 0);
2761 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2762 if (auto const getter = memoCacheGetForKeyCount(keys.count)) {
2763 return getter(cache, keysBegin);
2765 return memoCacheGetGeneric(
2766 cache,
2767 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2768 keysBegin
2770 }();
2772 assertx(!c || tvIsPlausible(*c));
2773 assertx(!c || c->m_type != KindOfUninit);
2774 return c;
2779 OPTBLD_INLINE void iopMemoGet(PC& pc, PC notfound, LocalRange keys) {
2780 if (auto const c = memoGetImpl(keys)) {
2781 tvDup(*c, *vmStack().allocC());
2782 } else {
2783 pc = notfound;
2787 OPTBLD_INLINE void iopMemoGetEager(PC& pc,
2788 PC notfound,
2789 PC suspended,
2790 LocalRange keys) {
2791 assertx(vmfp()->func()->isAsyncFunction());
2792 assertx(!isResumed(vmfp()));
2794 if (auto const c = memoGetImpl(keys)) {
2795 tvDup(*c, *vmStack().allocC());
2796 if (!c->m_aux.u_asyncEagerReturnFlag) {
2797 assertx(tvIsObject(c) && c->m_data.pobj->isWaitHandle());
2798 pc = suspended;
2800 } else {
2801 pc = notfound;
2805 namespace {
2807 OPTBLD_INLINE void memoSetImpl(LocalRange keys, TypedValue val) {
2808 auto const fp = vmfp();
2809 auto const func = fp->func();
2810 assertx(func->isMemoizeWrapper());
2811 assertx(keys.first + keys.count <= func->numLocals());
2812 assertx(tvIsPlausible(val));
2814 for (auto i = 0; i < keys.count; ++i) {
2815 auto const key = frame_local(fp, keys.first + i);
2816 if (!isIntType(type(key)) && !isStringType(type(key))) {
2817 raise_error("Memoization keys can only be ints or strings");
2821 if (!func->isMethod() || func->isStatic()) {
2822 auto const lsbCls = func->isMemoizeWrapperLSB() ? fp->getClass() : nullptr;
2823 if (keys.count > 0) {
2824 auto cache =
2825 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
2826 : rds::bindStaticMemoCache(func);
2827 if (!cache.isInit()) cache.initWith(nullptr);
2828 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2829 if (auto setter = memoCacheSetForKeyCount(keys.count)) {
2830 return setter(*cache, keysBegin, val);
2832 return memoCacheSetGeneric(
2833 *cache,
2834 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2835 keysBegin,
2840 auto cache =
2841 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
2842 : rds::bindStaticMemoValue(func);
2843 if (!cache.isInit()) {
2844 tvWriteUninit(*cache);
2845 cache.markInit();
2848 tvSetWithAux(val, *cache);
2849 return;
2852 checkThis(fp);
2853 auto const this_ = fp->getThis();
2854 auto const cls = func->cls();
2855 assertx(this_->instanceof(cls));
2856 assertx(cls->hasMemoSlots());
2858 this_->setAttribute(ObjectData::UsedMemoCache);
2860 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
2862 auto slot = UNLIKELY(this_->hasNativeData())
2863 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
2864 : this_->memoSlot(memoInfo.first);
2866 if (keys.count == 0 && !memoInfo.second) {
2867 tvSetWithAux(val, *slot->getValue());
2868 return;
2871 auto& cache = slot->getCacheForWrite();
2873 if (memoInfo.second) {
2874 if (keys.count == 0) {
2875 return memoCacheSetSharedOnly(
2876 cache,
2877 makeSharedOnlyKey(func->getFuncId()),
2881 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2882 if (auto const setter = sharedMemoCacheSetForKeyCount(keys.count)) {
2883 return setter(cache, func->getFuncId(), keysBegin, val);
2885 return memoCacheSetGeneric(
2886 cache,
2887 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2888 keysBegin,
2893 assertx(keys.count > 0);
2894 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2895 if (auto const setter = memoCacheSetForKeyCount(keys.count)) {
2896 return setter(cache, keysBegin, val);
2898 return memoCacheSetGeneric(
2899 cache,
2900 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2901 keysBegin,
2908 OPTBLD_INLINE void iopMemoSet(LocalRange keys) {
2909 auto val = *vmStack().topC();
2910 assertx(val.m_type != KindOfUninit);
2911 if (vmfp()->func()->isAsyncFunction()) {
2912 assertx(tvIsObject(val) && val.m_data.pobj->isWaitHandle());
2913 val.m_aux.u_asyncEagerReturnFlag = 0;
2915 memoSetImpl(keys, val);
2918 OPTBLD_INLINE void iopMemoSetEager(LocalRange keys) {
2919 assertx(vmfp()->func()->isAsyncFunction());
2920 assertx(!isResumed(vmfp()));
2921 auto val = *vmStack().topC();
2922 assertx(val.m_type != KindOfUninit);
2923 val.m_aux.u_asyncEagerReturnFlag = static_cast<uint32_t>(-1);
2924 memoSetImpl(keys, val);
2927 OPTBLD_INLINE void iopIssetG() {
2928 StringData* name;
2929 TypedValue* tv1 = vmStack().topTV();
2930 auto const lval = lookup_gbl(vmfp(), name, tv1);
2931 SCOPE_EXIT { decRefStr(name); };
2932 auto const e = lval && !tvIsNull(lval);
2933 vmStack().replaceC<KindOfBoolean>(e);
2936 OPTBLD_INLINE void iopIssetS() {
2937 SpropState ss(vmStack(), true);
2938 bool e;
2939 if (!(ss.visible && ss.accessible)) {
2940 e = false;
2941 } else {
2942 e = !tvIsNull(ss.val);
2944 ss.output->m_data.num = e;
2945 ss.output->m_type = KindOfBoolean;
2948 OPTBLD_INLINE void iopIssetL(tv_lval val) {
2949 bool ret = !is_null(val);
2950 TypedValue* topTv = vmStack().allocTV();
2951 topTv->m_data.num = ret;
2952 topTv->m_type = KindOfBoolean;
2955 OPTBLD_INLINE void iopIsUnsetL(tv_lval val) {
2956 bool ret = type(val) == KindOfUninit;
2957 TypedValue* topTv = vmStack().allocTV();
2958 topTv->m_data.num = ret;
2959 topTv->m_type = KindOfBoolean;
2962 OPTBLD_INLINE static bool isTypeHelper(TypedValue val, IsTypeOp op) {
2963 assertx(tvIsPlausible(val));
2965 switch (op) {
2966 case IsTypeOp::Null: return is_null(&val);
2967 case IsTypeOp::Bool: return is_bool(&val);
2968 case IsTypeOp::Int: return is_int(&val);
2969 case IsTypeOp::Dbl: return is_double(&val);
2970 case IsTypeOp::Vec: return is_vec(&val);
2971 case IsTypeOp::Dict: return is_dict(&val);
2972 case IsTypeOp::Keyset: return is_keyset(&val);
2973 case IsTypeOp::Obj: return is_object(&val);
2974 case IsTypeOp::Str: return is_string(&val);
2975 case IsTypeOp::Res: return tvIsResource(val);
2976 case IsTypeOp::Scalar: return HHVM_FN(is_scalar)(tvAsCVarRef(val));
2977 case IsTypeOp::ArrLike: return is_any_array(&val);
2978 case IsTypeOp::LegacyArrLike: {
2979 return HHVM_FN(is_array_marked_legacy)(tvAsCVarRef(val));
2981 case IsTypeOp::ClsMeth: return is_clsmeth(&val);
2982 case IsTypeOp::Func: return is_fun(&val);
2983 case IsTypeOp::Class: return is_class(&val);
2985 not_reached();
2988 OPTBLD_INLINE void iopIsTypeL(named_local_var loc, IsTypeOp op) {
2989 if (type(loc.lval) == KindOfUninit) {
2990 raise_undefined_local(vmfp(), loc.name);
2992 vmStack().pushBool(isTypeHelper(*loc.lval, op));
2995 OPTBLD_INLINE void iopIsTypeC(IsTypeOp op) {
2996 auto val = vmStack().topC();
2997 vmStack().replaceC(make_tv<KindOfBoolean>(isTypeHelper(*val, op)));
3000 OPTBLD_INLINE void iopAssertRATL(local_var loc, RepoAuthType rat) {
3001 if (debug) {
3002 auto const val = *loc.lval;
3003 auto const func = vmfp()->func();
3004 auto vm = &*g_context;
3005 always_assert_flog(
3006 tvMatchesRepoAuthType(val, rat),
3007 "failed assert RATL on local slot {}: maybe ${} in {}:{}, expected {},"
3008 " got {}",
3009 loc.index,
3010 loc.index < func->numNamedLocals() && func->localNames()[loc.index]
3011 ? func->localNames()[loc.index]->data()
3012 : "<unnamed/unknown>",
3013 vm->getContainingFileName()->data(),
3014 vm->getLine(),
3015 show(rat),
3016 toStringElm(val)
3021 OPTBLD_INLINE void iopAssertRATStk(uint32_t stkSlot, RepoAuthType rat) {
3022 if (debug) {
3023 auto const tv = *vmStack().indTV(stkSlot);
3024 auto vm = &*g_context;
3025 always_assert_flog(
3026 tvMatchesRepoAuthType(tv, rat),
3027 "failed assert RATStk {} in {}:{}, expected {}, got {}",
3028 stkSlot,
3029 vm->getContainingFileName()->data(),
3030 vm->getLine(),
3031 show(rat),
3032 toStringElm(tv)
3037 OPTBLD_INLINE void iopBreakTraceHint() {
3040 OPTBLD_INLINE void iopAKExists() {
3041 TypedValue* arr = vmStack().topTV();
3042 auto key = tvClassToString(*(arr + 1));
3043 bool result = HHVM_FN(array_key_exists)(tvAsCVarRef(key), tvAsCVarRef(arr));
3044 vmStack().popTV();
3045 vmStack().replaceTV<KindOfBoolean>(result);
3048 OPTBLD_INLINE void iopGetMemoKeyL(named_local_var loc) {
3049 DEBUG_ONLY auto const func = vmfp()->func();
3050 assertx(func->isMemoizeWrapper());
3051 assertx(tvIsPlausible(*loc.lval));
3053 if (UNLIKELY(type(loc.lval) == KindOfUninit)) {
3054 tvWriteNull(loc.lval);
3055 raise_undefined_local(vmfp(), loc.name);
3058 // Use the generic scheme, which is performed by
3059 // serialize_memoize_param.
3060 auto const key = HHVM_FN(serialize_memoize_param)(*loc.lval);
3061 tvCopy(key, *vmStack().allocC());
3064 OPTBLD_INLINE void iopIdx() {
3065 TypedValue* def = vmStack().topTV();
3066 auto const key = tvClassToString(*vmStack().indTV(1));
3067 TypedValue* arr = vmStack().indTV(2);
3069 if (isNullType(key.m_type)) {
3070 tvDecRefGen(arr);
3071 *arr = *def;
3072 vmStack().ndiscard(2);
3073 return;
3076 TypedValue result;
3077 if (isArrayLikeType(arr->m_type)) {
3078 result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3079 tvAsCVarRef(&key),
3080 tvAsCVarRef(def));
3081 vmStack().popTV();
3082 } else if (arr->m_type == KindOfObject) {
3083 auto obj = arr->m_data.pobj;
3084 if (obj->isCollection() && collections::contains(obj, tvAsCVarRef(&key))) {
3085 result = collections::at(obj, &key).tv();
3086 tvIncRefGen(result);
3087 vmStack().popTV();
3088 } else {
3089 result = *def;
3090 vmStack().discard();
3092 } else if (isStringType(arr->m_type)) {
3093 // This replicates the behavior of the hack implementation of idx, which
3094 // first checks isset($arr[$idx]), then returns $arr[(int)$idx]
3095 auto str = arr->m_data.pstr;
3096 if (IssetElemString<KeyType::Any>(str, key)) {
3097 auto idx = tvCastToInt64(key);
3098 assertx(idx >= 0 && idx < str->size());
3099 result = make_tv<KindOfPersistentString>(str->getChar(idx));
3100 vmStack().popTV();
3101 } else {
3102 result = *def;
3103 vmStack().discard();
3105 } else {
3106 result = *def;
3107 vmStack().discard();
3109 vmStack().popTV();
3110 tvDecRefGen(arr);
3111 *arr = result;
3114 OPTBLD_INLINE void iopArrayIdx() {
3115 TypedValue* def = vmStack().topTV();
3116 auto const key = tvClassToString(*vmStack().indTV(1));
3117 TypedValue* arr = vmStack().indTV(2);
3118 if (isClsMethType(type(arr))) {
3119 tvCastToVecInPlace(arr);
3121 auto const result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3122 tvAsCVarRef(&key),
3123 tvAsCVarRef(def));
3124 vmStack().popTV();
3125 vmStack().popTV();
3126 tvDecRefGen(arr);
3127 *arr = result;
3130 namespace {
3131 void implArrayMarkLegacy(bool legacy) {
3132 auto const recursive = *vmStack().topTV();
3133 if (!tvIsBool(recursive)) {
3134 SystemLib::throwInvalidArgumentExceptionObject(
3135 folly::sformat("$recursive must be a bool; got {}",
3136 getDataTypeString(type(recursive))));
3139 auto const input = vmStack().indTV(1);
3140 auto const output = val(recursive).num
3141 ? arrprov::markTvRecursively(*input, legacy)
3142 : arrprov::markTvShallow(*input, legacy);
3144 vmStack().popTV();
3145 tvMove(output, input);
3149 OPTBLD_INLINE void iopArrayMarkLegacy() {
3150 implArrayMarkLegacy(true);
3153 OPTBLD_INLINE void iopArrayUnmarkLegacy() {
3154 implArrayMarkLegacy(false);
3157 OPTBLD_INLINE void iopSetL(tv_lval to) {
3158 TypedValue* fr = vmStack().topC();
3159 tvSet(*fr, to);
// SetG bytecode: stack is [value, name]; creates the global if needed
// (lookupd_gbl), assigns the value, and leaves the value as the result.
3162 OPTBLD_INLINE void iopSetG() {
3163 StringData* name;
3164 TypedValue* fr = vmStack().topC();
3165 TypedValue* tv2 = vmStack().indTV(1);
// lookupd_gbl defines the global if absent, so `to` is always non-null.
3166 auto const to = lookupd_gbl(vmfp(), name, tv2);
3167 SCOPE_EXIT { decRefStr(name); };
3168 assertx(to);
3169 tvSet(*fr, to);
// Overwrite the name slot with the value and pop the (moved-from) top cell.
3170 memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
3171 vmStack().discard();
// SetS bytecode: static-property assignment. Stack (top-down) is
// [value, class, prop-name]; result (the assigned value) replaces the
// prop-name slot and the other two cells are discarded.
3174 OPTBLD_INLINE void iopSetS(ReadonlyOp op) {
3175 TypedValue* tv1 = vmStack().topTV();
3176 TypedValue* clsCell = vmStack().indC(1);
3177 TypedValue* propn = vmStack().indTV(2);
3178 TypedValue* output = propn;
3179 StringData* name;
3180 TypedValue* val;
3181 bool visible, accessible, readonly, constant;
3182 Slot slot;
3184 if (!isClassType(clsCell->m_type)) {
3185 raise_error("Attempting static property access on non class");
3187 auto const cls = clsCell->m_data.pclass;
// Resolve the static property; fills val/slot and the access flags.
3189 lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
3190 accessible, constant, readonly, true);
3192 SCOPE_EXIT { decRefStr(name); };
// Readonly enforcement: a ReadonlyOp::Readonly write requires a readonly prop.
3194 if (!readonly && op == ReadonlyOp::Readonly) {
3195 throw_must_be_readonly(cls->name()->data(), name->data());
3198 if (!(visible && accessible)) {
3199 raise_error("Invalid static property access: %s::%s",
3200 cls->name()->data(),
3201 name->data());
3203 if (constant) {
3204 throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
// Verify the declared type hint (and any generics upper bounds) before writing.
3206 if (RuntimeOption::EvalCheckPropTypeHints > 0) {
3207 auto const& sprop = cls->staticProperties()[slot];
3208 auto const& tc = sprop.typeConstraint;
3209 if (tc.isCheckable()) tc.verifyStaticProperty(tv1, cls, sprop.cls, name);
3210 if (RuntimeOption::EvalEnforceGenericsUB > 0) {
3211 for (auto const& ub : sprop.ubs) {
3212 if (ub.isCheckable()) {
3213 ub.verifyStaticProperty(tv1, cls, sprop.cls, name);
3218 always_assert(cls->sPropLink(slot).isLocal());
3219 tvSet(*tv1, *val);
// Move the value into the output slot and drop the class + value cells.
3220 tvDecRefGen(propn);
3221 memcpy(output, tv1, sizeof(TypedValue));
3222 vmStack().ndiscard(2);
// SetOpL bytecode: compound assignment (+=, .=, etc.) on local `to` with the
// cell on top of the stack; leaves the new value of the local on the stack.
3225 OPTBLD_INLINE void iopSetOpL(tv_lval to, SetOpOp op) {
3226 TypedValue* fr = vmStack().topC();
3227 setopBody(to, op, fr);
3228 tvDecRefGen(fr);
3229 tvDup(*to, *fr);
// SetOpG bytecode: compound assignment on a global. Stack is [value, name];
// the global is created if absent; the result replaces the name slot.
3232 OPTBLD_INLINE void iopSetOpG(SetOpOp op) {
3233 StringData* name;
3234 TypedValue* fr = vmStack().topC();
3235 TypedValue* tv2 = vmStack().indTV(1);
3236 // XXX We're probably not getting warnings totally correct here
3237 auto const to = lookupd_gbl(vmfp(), name, tv2);
3238 SCOPE_EXIT { decRefStr(name); };
3239 assertx(to);
3240 setopBody(to, op, fr);
3241 tvDecRefGen(fr);
3242 tvDecRefGen(tv2);
// Publish the updated global as the instruction's result.
3243 tvDup(*to, *tv2);
3244 vmStack().discard();
// SetOpS bytecode: compound assignment on a static property. Stack (top-down)
// is [value, class, prop-name]; the result replaces the prop-name slot.
3247 OPTBLD_INLINE void iopSetOpS(SetOpOp op) {
3248 TypedValue* fr = vmStack().topC();
3249 TypedValue* clsCell = vmStack().indC(1);
3250 TypedValue* propn = vmStack().indTV(2);
3251 TypedValue* output = propn;
3252 StringData* name;
3253 TypedValue* val;
3254 bool visible, accessible, readonly, constant;
3255 Slot slot;
3257 if (!isClassType(clsCell->m_type)) {
3258 raise_error("Attempting static property access on non class");
3260 auto const cls = clsCell->m_data.pclass;
3262 lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
3263 accessible, constant, readonly, false);
3264 SCOPE_EXIT { decRefStr(name); };
3265 if (!(visible && accessible)) {
3266 raise_error("Invalid static property access: %s::%s",
3267 cls->name()->data(),
3268 name->data());
3270 if (constant) {
3271 throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
3273 auto const& sprop = cls->staticProperties()[slot];
// If the type constraint must be re-verified for this op, compute the new
// value in a temporary so the property is untouched if verification throws.
3274 if (setOpNeedsTypeCheck(sprop.typeConstraint, op, val)) {
3275 TypedValue temp;
3276 tvDup(*val, temp);
3277 SCOPE_FAIL { tvDecRefGen(&temp); };
3278 setopBody(&temp, op, fr);
3279 sprop.typeConstraint.verifyStaticProperty(
3280 &temp, cls, sprop.cls, name
3282 always_assert(cls->sPropLink(slot).isLocal());
3283 tvMove(temp, *val);
3284 } else {
3285 always_assert(cls->sPropLink(slot).isLocal());
// Fast path: mutate the property storage in place.
3286 setopBody(val, op, fr);
3289 tvDecRefGen(propn);
3290 tvDecRefGen(fr);
3291 tvDup(*val, *output);
3292 vmStack().ndiscard(2);
// IncDecL bytecode: ++/-- (pre or post, per `op`) on a local; pushes the
// instruction's result. An uninit local raises an undefined-local notice
// and is treated as null.
3295 OPTBLD_INLINE void iopIncDecL(named_local_var fr, IncDecOp op) {
3296 TypedValue* to = vmStack().allocTV();
3297 tvWriteUninit(*to);
3298 if (UNLIKELY(type(fr.lval) == KindOfUninit)) {
3299 raise_undefined_local(vmfp(), fr.name);
3300 tvWriteNull(fr.lval);
3302 tvCopy(IncDecBody(op, fr.lval), *to);
// IncDecG bytecode: ++/-- on a global named by the cell on top of the stack;
// the name cell is replaced by the result. The global is created if absent.
3305 OPTBLD_INLINE void iopIncDecG(IncDecOp op) {
3306 StringData* name;
3307 TypedValue* nameCell = vmStack().topTV();
3308 auto const gbl = lookupd_gbl(vmfp(), name, nameCell);
// Keep the old name cell alive until after we overwrite it with the result.
3309 auto oldNameCell = *nameCell;
3310 SCOPE_EXIT {
3311 decRefStr(name);
3312 tvDecRefGen(oldNameCell);
3314 assertx(gbl);
3315 tvCopy(IncDecBody(op, gbl), *nameCell);
// IncDecS bytecode: ++/-- on a static property. SpropState resolves the
// class/name cells on the stack into property storage plus access flags.
3318 OPTBLD_INLINE void iopIncDecS(IncDecOp op) {
3319 SpropState ss(vmStack(), false);
3320 if (!(ss.visible && ss.accessible)) {
3321 raise_error("Invalid static property access: %s::%s",
3322 ss.cls->name()->data(),
3323 ss.name->data());
3325 if (ss.constant) {
3326 throw_cannot_modify_static_const_prop(ss.cls->name()->data(),
3327 ss.name->data());
// Only fetch the sprop metadata when type-hint checking is enabled and the
// constraint is actually checkable.
3329 auto const checkable_sprop = [&]() -> const Class::SProp* {
3330 if (RuntimeOption::EvalCheckPropTypeHints <= 0) return nullptr;
3331 auto const& sprop = ss.cls->staticProperties()[ss.slot];
3332 return sprop.typeConstraint.isCheckable() ? &sprop : nullptr;
3333 }();
3335 auto const val = ss.val;
3336 if (checkable_sprop) {
// Compute into a temporary so the property is untouched if the type
// constraint rejects the new value.
3337 TypedValue temp;
3338 tvDup(*val, temp);
3339 SCOPE_FAIL { tvDecRefGen(&temp); };
3340 auto result = IncDecBody(op, &temp);
3341 SCOPE_FAIL { tvDecRefGen(&result); };
3342 checkable_sprop->typeConstraint.verifyStaticProperty(
3343 &temp,
3344 ss.cls,
3345 checkable_sprop->cls,
3346 ss.name
3348 always_assert(ss.cls->sPropLink(ss.slot).isLocal());
3349 tvMove(temp, *val);
3350 tvCopy(result, *ss.output);
3351 } else {
3352 always_assert(ss.cls->sPropLink(ss.slot).isLocal());
3353 tvCopy(IncDecBody(op, val), *ss.output);
// UnsetL bytecode: unset a local (decref and write uninit).
3357 OPTBLD_INLINE void iopUnsetL(tv_lval loc) {
3358 tvUnset(loc);
// UnsetG bytecode: unset the global named by the cell on top of the stack.
3361 OPTBLD_INLINE void iopUnsetG() {
3362 TypedValue* tv1 = vmStack().topTV();
3363 StringData* name = lookup_name(tv1);
3364 SCOPE_EXIT { decRefStr(name); };
3365 auto env = g_context->m_globalNVTable;
3366 assertx(env != nullptr);
3367 env->unset(name);
3368 vmStack().popC();
3371 namespace {
// If the current frame is a closure body, copy the closure's bound context
// and use-vars into the frame's locals; otherwise a no-op.
3373 void initClosureLocals() {
3374 auto const ar = vmfp();
3375 if (!ar->func()->isClosureBody()) return;
3376 c_Closure::initActRecFromClosure(ar);
// Write Uninit into every non-parameter local of the current frame
// (locals from firstRegularLocalId() up to numLocals()).
3379 void initRegularLocals() {
3380 auto const ar = vmfp();
3381 auto const func = ar->func();
3382 auto const firstRegularLocal = func->firstRegularLocalId();
3383 auto const numLocals = func->numLocals();
3384 for (auto i = firstRegularLocal; i < numLocals; ++i) {
3385 tvWriteUninit(frame_local(ar, i));
// Interpreter-side function entry: initialize closure and regular locals,
// then fire the function-call event hook. Returns false if an intercept
// handled the call and the callee body should be skipped.
3391 bool funcEntry() {
3392 assertx(!isResumed(vmfp()));
// The stack pointer must sit exactly one full frame below the ActRec.
3393 assertx(
3394 reinterpret_cast<TypedValue*>(vmfp()) - vmStack().top() ==
3395 vmfp()->func()->numSlotsInFrame()
3398 initClosureLocals();
3399 initRegularLocals();
3401 // If this returns false, the callee was intercepted and should be skipped.
3402 return EventHook::FunctionCall(
3403 vmfp(), EventHook::NormalFunc, EventHook::Source::Interpreter);
// Spill and populate the callee ActRec, run the callee-side prologue checks
// (generics, arity, dynamic-call, coeffects), initialize the callee's inputs,
// and enter the function. `ctx` is the $this/class context (may be null).
3406 void doFCall(PrologueFlags prologueFlags, const Func* func,
3407 uint32_t numArgsInclUnpack, void* ctx, TCA retAddr) {
3408 TRACE(3, "FCall: pc %p func %p\n", vmpc(), vmfp()->func()->entry());
3410 assertx(numArgsInclUnpack <= func->numNonVariadicParams() + 1);
3411 assertx(kNumActRecCells == 2);
// The ActRec sits above the args (plus the generics cell, when present).
3412 ActRec* ar = vmStack().indA(
3413 numArgsInclUnpack + (prologueFlags.hasGenerics() ? 1 : 0));
3415 // Callee checks and input initialization.
3416 calleeGenericsChecks(func, prologueFlags.hasGenerics());
3417 calleeArgumentArityChecks(func, numArgsInclUnpack);
3418 calleeDynamicCallChecks(func, prologueFlags.isDynamicCall());
3419 calleeCoeffectChecks(func, prologueFlags.coeffects(), numArgsInclUnpack, ctx);
3420 func->recordCall();
3421 initFuncInputs(func, numArgsInclUnpack);
3423 ar->m_sfp = vmfp();
3424 ar->setFunc(func);
3425 ar->setJitReturn(retAddr);
3426 ar->m_callOffAndFlags = ActRec::encodeCallOffsetAndFlags(
3427 prologueFlags.callOffset(),
3428 prologueFlags.asyncEagerReturn() ? (1 << ActRec::AsyncEagerRet) : 0
3430 ar->setThisOrClassAllowNull(ctx);
3432 prepareFuncEntry(ar, numArgsInclUnpack);
3435 namespace {
// Tag type for calls with no $this/class context.
3437 enum class NoCtx {};
// Convert the various context forms into the raw pointer doFCall() stores in
// the ActRec. The Object&& overload transfers ownership (detach); the
// lvalue Object overload is deleted to force explicit std::move at call sites.
3439 void* takeCtx(Class* cls) { return cls; }
3440 void* takeCtx(Object& obj) = delete;
3441 void* takeCtx(Object&& obj) { return obj.detach(); }
3442 void* takeCtx(NoCtx) {
// In debug builds, poison the this-slot so accidental use is caught.
3443 if (debug) return reinterpret_cast<void*>(ActRec::kTrashedThisSlot);
3444 return nullptr;
// Common call-instruction backend: performs caller-side checks (inout,
// readonly, dynamic-call logging, stack depth), normalizes the argument
// count (unpacking / overflow into the variadic param), builds the
// PrologueFlags, and enters the callee. Returns where the JIT should resume.
3447 template<bool dynamic, typename Ctx>
3448 JitResumeAddr fcallImpl(bool retToJit, PC origpc, PC& pc, const FCallArgs& fca,
3449 const Func* func, Ctx&& ctx,
3450 bool logAsDynamicCall = true, bool isCtor = false) {
3451 if (fca.enforceInOut()) checkInOutMismatch(func, fca.numArgs, fca.inoutArgs);
3452 if (fca.enforceReadonly()) {
3453 checkReadonlyMismatch(func, fca.numArgs, fca.readonlyArgs);
3455 if (fca.enforceMutableReturn() && (func->attrs() & AttrReadonlyReturn)) {
3456 throwReadonlyMismatch(func, kReadonlyReturnId);
3458 if (fca.enforceReadonlyThis() && !(func->attrs() & AttrReadonlyThis)) {
3459 throwReadonlyMismatch(func, kReadonlyThisId);
3461 if (dynamic && logAsDynamicCall) callerDynamicCallChecks(func);
3462 checkStack(vmStack(), func, 0);
// Normalize args: flatten an unpack, or pack overflow args into a vec for
// the variadic parameter. GenericsSaver keeps the generics cell in place.
3464 auto const numArgsInclUnpack = [&] {
3465 if (UNLIKELY(fca.hasUnpack())) {
3466 GenericsSaver gs{fca.hasGenerics()};
3467 return prepareUnpackArgs(func, fca.numArgs, true);
3470 if (UNLIKELY(fca.numArgs > func->numNonVariadicParams())) {
3471 GenericsSaver gs{fca.hasGenerics()};
3472 iopNewVec(fca.numArgs - func->numNonVariadicParams());
3473 return func->numNonVariadicParams() + 1;
3476 return fca.numArgs;
3477 }();
3479 auto const prologueFlags = PrologueFlags(
3480 fca.hasGenerics(),
3481 dynamic,
3482 fca.asyncEagerOffset != kInvalidOffset && func->supportsAsyncEagerReturn(),
3483 Offset(origpc - vmfp()->func()->entry()),
3484 0, // generics bitmap not used by interpreter
3485 vmfp()->providedCoeffectsForCall(isCtor)
3488 doFCall(prologueFlags, func, numArgsInclUnpack,
3489 takeCtx(std::forward<Ctx>(ctx)), jit::tc::ustubs().retHelper);
3491 if (retToJit) {
3492 // Let JIT handle FuncEntry if possible.
3493 pc = vmpc();
3494 return
3495 JitResumeAddr::helper(jit::tc::ustubs().resumeHelperFuncEntryFromInterp);
3498 funcEntry();
3499 pc = vmpc();
3500 return JitResumeAddr::none();
3503 const StaticString s___invoke("__invoke");
3505 // This covers both closures and functors.
// FCallFunc on an object: call its __invoke method. If __invoke is static in
// the prologue, the context is the class rather than the instance.
3506 OPTBLD_INLINE JitResumeAddr fcallFuncObj(bool retToJit, PC origpc, PC& pc,
3507 const FCallArgs& fca) {
3508 assertx(tvIsObject(vmStack().topC()));
// Take ownership of the callee cell off the stack.
3509 auto obj = Object::attach(vmStack().topC()->m_data.pobj);
3510 vmStack().discard();
3512 auto const cls = obj->getVMClass();
3513 auto const func = cls->lookupMethod(s___invoke.get());
3515 if (func == nullptr) {
3516 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
3519 if (func->isStaticInPrologue()) {
3520 obj.reset();
3521 return fcallImpl<false>(retToJit, origpc, pc, fca, func, cls);
3522 } else {
3523 return fcallImpl<false>(retToJit, origpc, pc, fca, func, std::move(obj));
3528 * Supports callables:
3529 * array($instance, 'method')
3530 * array('Class', 'method'),
3531 * vec[$instance, 'method'],
3532 * vec['Class', 'method'],
3533 * dict[0 => $instance, 1 => 'method'],
3534 * dict[0 => 'Class', 1 => 'method'],
3535 * array(Class*, Func*),
3536 * array(ObjectData*, Func*),
// FCallFunc on an array-like callable: decode it into (func, this/class) via
// vm_decode_function and dispatch as a dynamic call.
3538 OPTBLD_INLINE JitResumeAddr fcallFuncArr(bool retToJit, PC origpc, PC& pc,
3539 const FCallArgs& fca) {
3540 assertx(tvIsArrayLike(vmStack().topC()));
3541 auto arr = Array::attach(vmStack().topC()->m_data.parr);
3542 vmStack().discard();
3544 ObjectData* thiz = nullptr;
3545 HPHP::Class* cls = nullptr;
3546 bool dynamic = false;
3548 auto const func = vm_decode_function(const_variant_ref{arr}, vmfp(), thiz,
3549 cls, dynamic, DecodeFlags::NoWarn);
3550 assertx(dynamic);
3551 if (UNLIKELY(func == nullptr)) {
3552 raise_error("Invalid callable (array)");
// Hold a refcount on $this before releasing the array that referenced it.
3555 Object thisRC(thiz);
3556 arr.reset();
3558 if (thisRC) {
3559 return fcallImpl<true>(retToJit, origpc, pc, fca, func, std::move(thisRC));
3560 } else if (cls) {
3561 return fcallImpl<true>(retToJit, origpc, pc, fca, func, cls);
3562 } else {
3563 return fcallImpl<true>(retToJit, origpc, pc, fca, func, NoCtx{});
3568 * Supports callables:
3569 * 'func_name'
3570 * 'class::method'
// FCallFunc on a string callable: decode "func" or "Class::method" and
// dispatch as a dynamic call; undefined names raise a call-to-undefined error.
3572 OPTBLD_INLINE JitResumeAddr fcallFuncStr(bool retToJit, PC origpc, PC& pc,
3573 const FCallArgs& fca) {
3574 assertx(tvIsString(vmStack().topC()));
3575 auto str = String::attach(vmStack().topC()->m_data.pstr);
3576 vmStack().discard();
3578 ObjectData* thiz = nullptr;
3579 HPHP::Class* cls = nullptr;
3580 bool dynamic = false;
3582 auto const func = vm_decode_function(const_variant_ref{str}, vmfp(), thiz,
3583 cls, dynamic, DecodeFlags::NoWarn);
3584 assertx(dynamic);
3585 if (UNLIKELY(func == nullptr)) {
3586 raise_call_to_undefined(str.get());
3589 Object thisRC(thiz);
3590 str.reset();
3592 if (thisRC) {
3593 return fcallImpl<true>(retToJit, origpc, pc, fca, func, std::move(thisRC));
3594 } else if (cls) {
3595 return fcallImpl<true>(retToJit, origpc, pc, fca, func, cls);
3596 } else {
3597 return fcallImpl<true>(retToJit, origpc, pc, fca, func, NoCtx{});
// FCallFunc on a func pointer: call it directly. A func bound to a class is
// ill-formed here (method pointers must go through clsmeth).
3601 OPTBLD_INLINE JitResumeAddr fcallFuncFunc(bool retToJit, PC origpc, PC& pc,
3602 const FCallArgs& fca) {
3603 assertx(tvIsFunc(vmStack().topC()));
3604 auto func = vmStack().topC()->m_data.pfunc;
3605 vmStack().discard();
3607 if (func->cls()) {
3608 raise_error(Strings::CALL_ILLFORMED_FUNC);
3611 return fcallImpl<false>(retToJit, origpc, pc, fca, func, NoCtx{});
// FCallFunc on a reified func pointer: push its reified generics onto the
// stack and call with the generics flag added to the FCallArgs.
3614 OPTBLD_INLINE JitResumeAddr fcallFuncRFunc(bool retToJit, PC origpc, PC& pc,
3615 FCallArgs& fca) {
3616 assertx(tvIsRFunc(vmStack().topC()));
3617 auto const rfunc = vmStack().topC()->m_data.prfunc;
3618 auto const func = rfunc->m_func;
3619 vmStack().discard();
// The rfunc's generics array becomes the generics input of the call.
3620 vmStack().pushArrayLike(rfunc->m_arr);
3621 decRefRFunc(rfunc);
3623 return
3624 fcallImpl<false>(retToJit, origpc, pc, fca.withGenerics(), func, NoCtx{});
// FCallFunc on a clsmeth pointer: call its func with its class as context.
3627 OPTBLD_INLINE JitResumeAddr fcallFuncClsMeth(bool retToJit, PC origpc, PC& pc,
3628 const FCallArgs& fca) {
3629 assertx(tvIsClsMeth(vmStack().topC()));
3630 auto const clsMeth = vmStack().topC()->m_data.pclsmeth;
3631 vmStack().discard();
3633 const Func* func = clsMeth->getFunc();
3634 auto const cls = clsMeth->getCls();
3635 assertx(func && cls);
3637 return fcallImpl<false>(retToJit, origpc, pc, fca, func, cls);
// FCallFunc on a reified clsmeth pointer: push its reified generics and call
// with the generics flag added, using its class as context.
3640 OPTBLD_INLINE JitResumeAddr fcallFuncRClsMeth(bool retToJit, PC origpc, PC& pc,
3641 const FCallArgs& fca) {
3642 assertx(tvIsRClsMeth(vmStack().topC()));
3643 auto const rclsMeth = vmStack().topC()->m_data.prclsmeth;
3644 auto const cls = rclsMeth->m_cls;
3645 auto const func = rclsMeth->m_func;
3646 vmStack().discard();
3647 vmStack().pushArrayLike(rclsMeth->m_arr);
3648 decRefRClsMeth(rclsMeth);
3650 return fcallImpl<false>(retToJit, origpc, pc, fca.withGenerics(), func, cls);
// Load the function named by litstr `id` (via its named-entity pair);
// raises a resolve-undefined error if it cannot be loaded.
3653 Func* resolveFuncImpl(Id id) {
3654 auto unit = vmfp()->func()->unit();
3655 auto const nep = unit->lookupNamedEntityPairId(id);
3656 auto func = Func::load(nep.second, nep.first);
3657 if (func == nullptr) raise_resolve_undefined(unit->lookupLitstrId(id));
3658 return func;
// ResolveFunc bytecode: resolve a function by litstr id and push a func ptr.
3661 OPTBLD_INLINE void iopResolveFunc(Id id) {
3662 auto func = resolveFuncImpl(id);
3663 vmStack().pushFunc(func);
// ResolveMethCaller bytecode: load a meth_caller func, verify it is callable
// from the current context (checkMethCaller), and push it.
3666 OPTBLD_INLINE void iopResolveMethCaller(Id id) {
3667 auto unit = vmfp()->func()->unit();
3668 auto const nep = unit->lookupNamedEntityPairId(id);
3669 auto func = Func::load(nep.second, nep.first);
3670 assertx(func && func->isMethCaller());
3671 checkMethCaller(func, arGetContextClass(vmfp()));
3672 vmStack().pushFunc(func);
// Allocate an RFuncData pairing `func` with its reified generics array.
3675 RFuncData* newRFuncImpl(Func* func, ArrayData* reified_generics) {
3676 auto rfunc = RFuncData::newInstance(func, reified_generics);
3677 TRACE(2, "ResolveRFunc: just created new rfunc %s: %p\n",
3678 func->name()->data(), rfunc);
3679 return rfunc;
3682 } // namespace
// ResolveRFunc bytecode: top of stack holds a vec of reified generics.
// Pushes a plain func ptr when the function has no reified generics,
// otherwise validates the generics and pushes an rfunc.
3684 OPTBLD_INLINE void iopResolveRFunc(Id id) {
3685 auto const tsList = vmStack().topC();
3687 // Should I refactor this out with iopNewObj*?
3688 auto const reified = [&] () -> ArrayData* {
3689 if (!tvIsVec(tsList)) {
3690 raise_error("Attempting ResolveRFunc with invalid reified generics");
3692 return tsList->m_data.parr;
3693 }();
3695 auto func = resolveFuncImpl(id);
3696 if (!func->hasReifiedGenerics()) {
3697 vmStack().popC();
3698 vmStack().pushFunc(func);
3699 } else {
3700 checkFunReifiedGenericMismatch(func, reified);
3701 auto rfunc = newRFuncImpl(func, reified);
// discard (not popC): the generics array's refcount moved into the rfunc.
3702 vmStack().discard();
3703 vmStack().pushRFuncNoRc(rfunc);
// FCallFunc bytecode: dispatch on the type of the callee cell on top of the
// stack (object/array/string/func/rfunc/clsmeth/rclsmeth).
3707 OPTBLD_INLINE JitResumeAddr iopFCallFunc(bool retToJit, PC origpc, PC& pc,
3708 FCallArgs fca) {
3709 auto const type = vmStack().topC()->m_type;
3710 if (isObjectType(type)) return fcallFuncObj(retToJit, origpc, pc, fca);
3711 if (isArrayLikeType(type)) return fcallFuncArr(retToJit, origpc, pc, fca);
3712 if (isStringType(type)) return fcallFuncStr(retToJit, origpc, pc, fca);
3713 if (isFuncType(type)) return fcallFuncFunc(retToJit, origpc, pc, fca);
3714 if (isRFuncType(type)) return fcallFuncRFunc(retToJit, origpc, pc, fca);
3715 if (isClsMethType(type)) return fcallFuncClsMeth(retToJit, origpc, pc, fca);
3716 if (isRClsMethType(type)) return fcallFuncRClsMeth(retToJit, origpc, pc, fca);
// Any other type is not callable.
3718 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
// FCallFuncD bytecode: call a function known by litstr id (non-dynamic).
3721 OPTBLD_INLINE JitResumeAddr iopFCallFuncD(bool retToJit, PC origpc, PC& pc,
3722 FCallArgs fca, Id id) {
3723 auto const nep = vmfp()->unit()->lookupNamedEntityPairId(id);
3724 auto const func = Func::load(nep.second, nep.first);
3725 if (UNLIKELY(func == nullptr)) {
3726 raise_call_to_undefined(vmfp()->unit()->lookupLitstrId(id));
3729 return fcallImpl<false>(retToJit, origpc, pc, fca, func, NoCtx{});
3732 namespace {
3734 const StaticString
3735 s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
// Common backend for FCallObjMethod[D]: look up `methName` on the receiver
// object (which sits under the args/ActRec cells), enforce this/static and
// reified-generics rules, and enter the method with the object as context.
// Takes over one reference on methName (decref'd here).
3737 template<bool dynamic>
3738 JitResumeAddr fcallObjMethodImpl(bool retToJit, PC origpc, PC& pc,
3739 const FCallArgs& fca, StringData* methName) {
3740 const Func* func;
3741 LookupResult res;
3742 assertx(tvIsObject(vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))));
3743 auto const obj =
3744 vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))->m_data.pobj;
3745 auto cls = obj->getVMClass();
// fca.context, when set, overrides the lookup context (meth_caller support);
// the special "override unsafe" sentinel means "use the receiver's class".
3746 auto const ctx = [&] {
3747 if (!fca.context) return arGetContextClass(vmfp());
3748 if (fca.context->isame(s_DynamicContextOverrideUnsafe.get())) {
3749 if (RO::RepoAuthoritative) {
3750 raise_error("Cannot use dynamic_meth_caller_force() in repo-mode");
3752 return cls;
3754 return Class::load(fca.context);
3755 }();
3756 auto const callCtx = MethodLookupCallContext(ctx, vmfp()->func());
3757 // if lookup throws, obj will be decref'd via stack
3758 res = lookupObjMethod(func, cls, methName, callCtx,
3759 MethodLookupErrorOptions::RaiseOnNotFound);
3760 assertx(func);
3761 decRefStr(methName);
3762 if (res == LookupResult::MethodFoundNoThis) {
3763 throw_has_this_need_static(func);
3765 assertx(res == LookupResult::MethodFoundWithThis);
3767 if (func->hasReifiedGenerics() && !fca.hasGenerics() &&
3768 !func->getReifiedGenericsInfo().allGenericsSoft()) {
3769 throw_call_reified_func_without_generics(func);
3772 // fcallImpl() will do further checks before spilling the ActRec. If any
3773 // of these checks fail, make sure it gets decref'd only via ctx.
3774 tvWriteNull(*vmStack().indC(fca.numInputs() + (kNumActRecCells - 1)));
3775 return
3776 fcallImpl<dynamic>(retToJit, origpc, pc, fca, func, Object::attach(obj));
// Fatal: tried to resolve a member function on a non-object value.
3779 static void raise_resolve_non_object(const char* methodName,
3780 const char* typeName = nullptr) {
3781 auto const msg = folly::sformat(
3782 "Cannot resolve a member function {}() on a non-object ({})",
3783 methodName, typeName
3786 raise_fatal_error(msg.c_str());
// Method call on a non-object: either throws BadMethodCallException (when
// ThrowExceptionOnBadMethodCall is set) or raises a fatal error.
3789 static void throw_call_non_object(const char* methodName,
3790 const char* typeName = nullptr) {
3791 std::string msg;
3792 folly::format(&msg, "Call to a member function {}() on a non-object ({})",
3793 methodName, typeName);
3795 if (RuntimeOption::ThrowExceptionOnBadMethodCall) {
3796 SystemLib::throwBadMethodCallExceptionObject(String(msg));
3798 raise_fatal_error(msg.c_str());
// Validate the receiver of an obj-method call. Returns false for the normal
// object case. For null with the null-safe operator (?->), unwinds the whole
// call's stack inputs — preserving inout arguments as outputs — pushes null
// as the result, and returns true ("handled"). Non-object/non-null receivers
// throw/raise via throw_call_non_object.
3801 ALWAYS_INLINE bool
3802 fcallObjMethodHandleInput(const FCallArgs& fca, ObjMethodOp op,
3803 const StringData* methName, bool extraStk) {
3804 TypedValue* obj = vmStack().indC(fca.numInputs()
3805 + (kNumActRecCells - 1)
3806 + (extraStk ? 1 : 0));
3807 if (LIKELY(isObjectType(obj->m_type))) return false;
3809 if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(obj->m_type))) {
3810 auto const dataTypeStr = getDataTypeString(obj->m_type).get();
3811 throw_call_non_object(methName->data(), dataTypeStr->data());
3814 // null?->method(...), pop extra stack input, all arguments and two uninits,
3815 // the null "object" and all uninits for inout returns, then push null.
3816 auto& stack = vmStack();
3817 if (extraStk) stack.popC();
3818 if (fca.hasGenerics()) stack.popC();
3819 if (fca.hasUnpack()) stack.popC();
3821 // Save any inout arguments, as those will be pushed unchanged as
3822 // the output.
3823 std::vector<TypedValue> inOuts;
3824 for (uint32_t i = 0; i < fca.numArgs; ++i) {
3825 if (fca.enforceInOut() && fca.isInOut(fca.numArgs - i - 1)) {
3826 inOuts.emplace_back(*stack.top());
3827 stack.discard();
3828 } else {
3829 stack.popTV();
3832 stack.popU();
3833 stack.popC();
3834 for (uint32_t i = 0; i < fca.numRets - 1; ++i) stack.popU();
3836 assertx(inOuts.size() == fca.numRets - 1);
3837 for (auto const tv : inOuts) *stack.allocC() = tv;
3838 stack.pushNull();
3840 // Handled.
3841 return true;
3844 } // namespace
// FCallObjMethod bytecode: dynamic method name on top of the stack; the
// receiver sits below the args. Null-safe/null handling is delegated to
// fcallObjMethodHandleInput.
3846 OPTBLD_INLINE JitResumeAddr
3847 iopFCallObjMethod(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
3848 const StringData*, ObjMethodOp op) {
3849 TypedValue* c1 = vmStack().topC(); // Method name.
3850 if (!isStringType(c1->m_type)) {
3851 raise_error(Strings::METHOD_NAME_MUST_BE_STRING);
3854 StringData* methName = c1->m_data.pstr;
3855 if (fcallObjMethodHandleInput(fca, op, methName, true)) {
3856 return JitResumeAddr::none();
3859 // We handle decReffing method name in fcallObjMethodImpl
3860 vmStack().discard();
3861 return fcallObjMethodImpl<true>(retToJit, origpc, pc, fca, methName);
// FCallObjMethodD bytecode: like FCallObjMethod but with a static (litstr)
// method name, so the call is non-dynamic.
3864 OPTBLD_INLINE JitResumeAddr
3865 iopFCallObjMethodD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
3866 const StringData*, ObjMethodOp op,
3867 const StringData* methName) {
3868 if (fcallObjMethodHandleInput(fca, op, methName, false)) {
3869 return JitResumeAddr::none();
3871 auto const methNameC = const_cast<StringData*>(methName);
3872 return fcallObjMethodImpl<false>(retToJit, origpc, pc, fca, methNameC);
// Resolve a SpecialClsRef (static::/self::/parent::) to a Class for the
// current frame; raises the appropriate error when no such class exists.
3875 Class* specialClsRefToCls(SpecialClsRef ref) {
3876 switch (ref) {
3877 case SpecialClsRef::LateBoundCls:
3878 if (auto const cls = frameStaticClass(vmfp())) return cls;
3879 raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
3880 case SpecialClsRef::SelfCls:
3881 if (auto const cls = arGetContextClass(vmfp())) return cls;
3882 raise_error(HPHP::Strings::CANT_ACCESS_SELF);
3883 case SpecialClsRef::ParentCls:
3884 if (auto const cls = arGetContextClass(vmfp())) {
3885 if (auto const parent = cls->parent()) return parent;
3886 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
3888 raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
3890 always_assert(false);
3893 namespace {
// Look up a static method for clsmeth resolution; errors if not found, and
// checks the result is usable as a clsmeth target (checkClsMethFuncHelper).
3895 const Func* resolveClsMethodFunc(Class* cls, const StringData* methName) {
3896 const Func* func;
3897 auto const ctx = arGetContextClass(vmfp());
3898 auto const callCtx = MethodLookupCallContext(ctx, vmfp()->func());
3899 auto const res = lookupClsMethod(func, cls, methName, nullptr,
3900 callCtx,
3901 MethodLookupErrorOptions::None);
3902 if (res == LookupResult::MethodNotFound) {
3903 raise_error("Failure to resolve method name \'%s::%s\'",
3904 cls->name()->data(), methName->data());
3906 assertx(res == LookupResult::MethodFoundNoThis);
3907 assertx(func);
3908 checkClsMethFuncHelper(func);
3909 return func;
// Build and push a clsmeth for (cls, methName); `extraStk` pops one extra
// input cell (the class cell) first.
3912 template<bool extraStk = false>
3913 void resolveClsMethodImpl(Class* cls, const StringData* methName) {
3914 const Func* func = resolveClsMethodFunc(cls, methName);
3915 auto clsmeth = ClsMethDataRef::create(cls, const_cast<Func*>(func));
3916 if (extraStk) vmStack().popC();
3917 vmStack().pushClsMethNoRc(clsmeth);
3920 } // namespace
// ResolveClsMethod bytecode: class cell on top of the stack.
3922 OPTBLD_INLINE void iopResolveClsMethod(const StringData* methName) {
3923 auto const c = vmStack().topC();
3924 if (!isClassType(c->m_type)) {
3925 raise_error("Attempting ResolveClsMethod with non-class");
3927 resolveClsMethodImpl<true>(c->m_data.pclass, methName);
// ResolveClsMethodD bytecode: class named by litstr id.
3930 OPTBLD_INLINE void iopResolveClsMethodD(Id classId,
3931 const StringData* methName) {
3932 auto const nep = vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
3933 auto cls = Class::load(nep.second, nep.first);
3934 if (UNLIKELY(cls == nullptr)) {
3935 raise_error("Failure to resolve class name \'%s\'", nep.first->data());
3937 resolveClsMethodImpl(cls, methName);
// ResolveClsMethodS bytecode: class given by static::/self::/parent::.
3940 OPTBLD_INLINE void iopResolveClsMethodS(SpecialClsRef ref,
3941 const StringData* methName) {
3942 resolveClsMethodImpl(specialClsRefToCls(ref), methName);
3945 namespace {
// Like resolveClsMethodImpl but with a vec of reified generics on top of the
// stack: pushes an rclsmeth when the target has reified generics, otherwise
// drops the generics and pushes a plain clsmeth.
3947 template<bool extraStk = false>
3948 void resolveRClsMethodImpl(Class* cls, const StringData* methName) {
3949 const Func* func = resolveClsMethodFunc(cls, methName);
3951 auto const tsList = vmStack().topC();
3952 auto const reified = [&] () -> ArrayData* {
3953 if (!tvIsVec(tsList)) {
3954 raise_error("Invalid reified generics when resolving class method");
3956 return tsList->m_data.parr;
3957 }();
3959 if (func->hasReifiedGenerics()) {
3960 checkFunReifiedGenericMismatch(func, reified);
3961 auto rclsmeth = RClsMethData::create(cls, const_cast<Func*>(func), reified);
// discard (not popC): the generics array's refcount moved into the rclsmeth.
3962 vmStack().discard();
3963 if (extraStk) vmStack().popC();
3964 vmStack().pushRClsMethNoRc(rclsmeth);
3965 } else {
3966 auto clsmeth = ClsMethDataRef::create(cls, const_cast<Func*>(func));
3967 vmStack().popC();
3968 if (extraStk) vmStack().popC();
3969 vmStack().pushClsMethNoRc(clsmeth);
3973 } // namespace
// ResolveRClsMethod bytecode: stack is [generics vec, class cell].
3975 OPTBLD_INLINE void iopResolveRClsMethod(const StringData* methName) {
3976 auto const c = vmStack().indC(1);
3977 if (!isClassType(c->m_type)) {
3978 raise_error("Attempting ResolveRClsMethod with non-class");
3980 resolveRClsMethodImpl<true>(c->m_data.pclass, methName);
// ResolveRClsMethodD bytecode: class named by litstr id; generics on stack.
3983 OPTBLD_INLINE void iopResolveRClsMethodD(Id classId,
3984 const StringData* methName) {
3985 auto const nep = vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
3986 auto cls = Class::load(nep.second, nep.first);
3987 if (UNLIKELY(cls == nullptr)) {
3988 raise_error("Failure to resolve class name \'%s\'", nep.first->data());
3990 resolveRClsMethodImpl<false>(cls, methName);
// ResolveRClsMethodS bytecode: class via static::/self::/parent::.
3993 OPTBLD_INLINE void iopResolveRClsMethodS(SpecialClsRef ref,
3994 const StringData* methName) {
3995 resolveRClsMethodImpl<false>(specialClsRefToCls(ref), methName);
3998 namespace {
// Common backend for the FCallClsMethod* bytecodes: look up `methName` on
// `cls` (with the current $this, if compatible, as receiver), enforce
// static/this and reified-generics rules, and enter the method. `forwarding`
// (self::/parent:: calls) propagates the caller's late-bound class.
// Takes over one reference on methName (decref'd here).
4000 template<bool dynamic>
4001 JitResumeAddr fcallClsMethodImpl(bool retToJit, PC origpc, PC& pc,
4002 const FCallArgs& fca, Class* cls,
4003 StringData* methName, bool forwarding,
4004 bool logAsDynamicCall = true) {
// fca.context override mirrors fcallObjMethodImpl (meth_caller support).
4005 auto const ctx = [&] {
4006 if (!fca.context) return liveClass();
4007 if (fca.context->isame(s_DynamicContextOverrideUnsafe.get())) {
4008 if (RO::RepoAuthoritative) {
4009 raise_error("Cannot use dynamic_meth_caller_force() in repo-mode");
4011 return cls;
4013 return Class::load(fca.context);
4014 }();
4015 auto obj = liveClass() && vmfp()->hasThis() ? vmfp()->getThis() : nullptr;
4016 const Func* func;
4017 auto const callCtx = MethodLookupCallContext(ctx, vmfp()->func());
4018 auto const res = lookupClsMethod(func, cls, methName, obj, callCtx,
4019 MethodLookupErrorOptions::RaiseOnNotFound);
4020 assertx(func);
4021 decRefStr(methName);
4023 if (res == LookupResult::MethodFoundNoThis) {
4024 if (!func->isStaticInPrologue()) {
4025 throw_missing_this(func);
4027 obj = nullptr;
4028 } else {
4029 assertx(obj);
4030 assertx(res == LookupResult::MethodFoundWithThis);
4033 if (func->hasReifiedGenerics() && !fca.hasGenerics() &&
4034 !func->getReifiedGenericsInfo().allGenericsSoft()) {
4035 throw_call_reified_func_without_generics(func);
4038 if (obj) {
4039 return fcallImpl<dynamic>(
4040 retToJit, origpc, pc, fca, func, Object(obj), logAsDynamicCall);
4041 } else {
4042 if (forwarding && ctx) {
4043 /* Propagate the current late bound class if there is one, */
4044 /* otherwise use the class given by this instruction's input */
4045 if (vmfp()->hasThis()) {
4046 cls = vmfp()->getThis()->getVMClass();
4047 } else {
4048 cls = vmfp()->getClass();
4051 return fcallImpl<dynamic>(
4052 retToJit, origpc, pc, fca, func, cls, logAsDynamicCall);
4056 } // namespace
// FCallClsMethod bytecode: stack is [class cell, method-name string, ...];
// always a dynamic call, optionally logged as such per `op`.
4058 OPTBLD_INLINE JitResumeAddr
4059 iopFCallClsMethod(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4060 const StringData*, IsLogAsDynamicCallOp op) {
4061 auto const c1 = vmStack().topC();
4062 if (!isClassType(c1->m_type)) {
4063 raise_error("Attempting to use non-class in FCallClsMethod");
4065 auto const cls = c1->m_data.pclass;
4067 auto const c2 = vmStack().indC(1); // Method name.
4068 if (!isStringType(c2->m_type)) {
4069 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4071 auto methName = c2->m_data.pstr;
4073 // fcallClsMethodImpl will take care of decReffing method name
4074 vmStack().ndiscard(2);
4075 assertx(cls && methName);
4076 auto const logAsDynamicCall = op == IsLogAsDynamicCallOp::LogAsDynamicCall ||
4077 RuntimeOption::EvalLogKnownMethodsAsDynamicCalls;
4078 return fcallClsMethodImpl<true>(
4079 retToJit, origpc, pc, fca, cls, methName, false, logAsDynamicCall);
// FCallClsMethodM bytecode: static method name, dynamic class given by the
// cell on top of the stack (class ptr, or a string class name, which raises
// a str-to-class notice). String inputs are treated as dynamic calls.
4082 OPTBLD_INLINE JitResumeAddr
4083 iopFCallClsMethodM(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4084 const StringData*, IsLogAsDynamicCallOp op,
4085 const StringData* methName) {
4086 auto const cell = vmStack().topC();
4087 auto isString = isStringType(cell->m_type);
4088 if (isString) {
4089 raise_str_to_class_notice(cell->m_data.pstr);
4091 auto const cls = lookupClsRef(cell);
4092 vmStack().popC();
4093 auto const methNameC = const_cast<StringData*>(methName);
4094 assertx(cls && methNameC);
4095 auto const logAsDynamicCall = op == IsLogAsDynamicCallOp::LogAsDynamicCall ||
4096 RuntimeOption::EvalLogKnownMethodsAsDynamicCalls;
// Only string class names (or the legacy no-class-pointers mode) count as
// dynamic; a real class pointer input is a non-dynamic call.
4097 if (isString || RuntimeOption::EvalEmitClassPointers == 0) {
4098 return fcallClsMethodImpl<true>(
4099 retToJit, origpc, pc, fca, cls, methNameC, false, logAsDynamicCall);
4100 } else {
4101 return fcallClsMethodImpl<false>(
4102 retToJit, origpc, pc, fca, cls, methNameC, false, logAsDynamicCall);
// FCallClsMethodD bytecode: both class (litstr id) and method name are
// static; non-dynamic call.
4106 OPTBLD_INLINE JitResumeAddr
4107 iopFCallClsMethodD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4108 const StringData*, Id classId, const StringData* methName) {
4109 const NamedEntityPair &nep =
4110 vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
4111 Class* cls = Class::load(nep.second, nep.first);
4112 if (cls == nullptr) {
4113 raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
4115 auto const methNameC = const_cast<StringData*>(methName);
4116 return fcallClsMethodImpl<false>(
4117 retToJit, origpc, pc, fca, cls, methNameC, false);
// FCallClsMethodS bytecode: dynamic method name, class via special ref
// (static::/self::/parent::). self/parent forward the late-bound class.
4120 OPTBLD_INLINE JitResumeAddr
4121 iopFCallClsMethodS(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4122 const StringData*, SpecialClsRef ref) {
4123 auto const c1 = vmStack().topC(); // Method name.
4124 if (!isStringType(c1->m_type)) {
4125 raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
4127 auto const cls = specialClsRefToCls(ref);
4128 auto methName = c1->m_data.pstr;
4130 // fcallClsMethodImpl will take care of decReffing name
4131 vmStack().ndiscard(1);
4132 auto const fwd = ref == SpecialClsRef::SelfCls ||
4133 ref == SpecialClsRef::ParentCls;
4134 return fcallClsMethodImpl<true>(
4135 retToJit, origpc, pc, fca, cls, methName, fwd);
// FCallClsMethodSD bytecode: static method name, class via special ref;
// non-dynamic call. self/parent forward the late-bound class.
4138 OPTBLD_INLINE JitResumeAddr
4139 iopFCallClsMethodSD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4140 const StringData*, SpecialClsRef ref,
4141 const StringData* methName) {
4142 auto const cls = specialClsRefToCls(ref);
4143 auto const methNameC = const_cast<StringData*>(methName);
4144 auto const fwd = ref == SpecialClsRef::SelfCls ||
4145 ref == SpecialClsRef::ParentCls;
4146 return fcallClsMethodImpl<false>(
4147 retToJit, origpc, pc, fca, cls, methNameC, fwd);
4150 namespace {
// Allocate an uninitialized instance of `cls`. If `reified_types` is
// non-null the reified-generics instantiation path is used. The returned
// object carries the reference the caller is expected to consume.
4152 ObjectData* newObjImpl(Class* cls, ArrayData* reified_types) {
4153 // Replace input with uninitialized instance.
4154 auto this_ = reified_types
4155 ? ObjectData::newInstanceReified<true>(cls, reified_types)
4156 : ObjectData::newInstance<true>(cls);
4157 TRACE(2, "NewObj: just new'ed an instance of class %s: %p\n",
4158 cls->name()->data(), this_);
// Feed the interpreter-side profiling of array-like property layouts.
4159 profileArrLikePropsForInterp(this_);
4160 return this_;
// Shared body of NewObjD/NewObjRD: load the class named by litstr `id`,
// construct an instance (optionally with reified generics), pop the reified
// list (if any) and push the new object, transferring its reference.
4163 void newObjDImpl(Id id, ArrayData* reified_types) {
4164 const NamedEntityPair &nep =
4165 vmfp()->func()->unit()->lookupNamedEntityPairId(id);
4166 auto cls = Class::load(nep.second, nep.first);
4167 if (cls == nullptr) {
4168 raise_error(Strings::UNKNOWN_CLASS,
4169 vmfp()->func()->unit()->lookupLitstrId(id)->data());
4171 auto this_ = newObjImpl(cls, reified_types);
// The reified generics vec (when present) was the stack top; drop it now
// that the instance has been created from it.
4172 if (reified_types) vmStack().popC();
// NoRc: the stack takes over the reference returned by newObjImpl.
4173 vmStack().pushObjectNoRc(this_);
4176 } // namespace
// NewObj: replace the class on the stack top with a new, uninitialized
// instance of it, after dynamic-construction checks on the caller.
4178 OPTBLD_INLINE void iopNewObj() {
4179 auto const clsCell = vmStack().topC();
4180 if (!isClassType(clsCell->m_type)) {
4181 raise_error("Attempting NewObj with non-class");
4183 auto const cls = clsCell->m_data.pclass;
4185 callerDynamicConstructChecks(cls);
4186 auto this_ = newObjImpl(cls, nullptr);
// Pop the class cell, then push the instance (reference transferred).
4187 vmStack().popC();
4188 vmStack().pushObjectNoRc(this_);
// NewObjR: like NewObj, but with reified generics on the stack top and the
// class one slot below. Null generics means "none"; otherwise a vec is
// required.
4191 OPTBLD_INLINE void iopNewObjR() {
4192 auto const reifiedCell = vmStack().topC();
4193 auto const clsCell = vmStack().indC(1);
4195 if (!isClassType(clsCell->m_type)) {
4196 raise_error("Attempting NewObjR with non-class");
4198 auto const cls = clsCell->m_data.pclass;
4200 auto const reified = [&] () -> ArrayData* {
4201 if (reifiedCell->m_type == KindOfNull) return nullptr;
4202 if (!tvIsVec(reifiedCell)) {
4203 raise_error("Attempting NewObjR with invalid reified generics");
4205 return reifiedCell->m_data.parr;
4206 }();
4208 callerDynamicConstructChecks(cls);
4209 auto this_ = newObjImpl(cls, reified);
// Pop both inputs (generics + class), then push the new instance.
4210 vmStack().popC();
4211 vmStack().popC();
4212 vmStack().pushObjectNoRc(this_);
// NewObjD: construct an instance of the statically-named class `id`, with
// no reified generics.
4215 OPTBLD_INLINE void iopNewObjD(Id id) {
4216 newObjDImpl(id, nullptr);
// NewObjRD: construct an instance of the statically-named class `id` with a
// reified-generics list taken from the stack top (null = none, else a vec).
4219 OPTBLD_INLINE void iopNewObjRD(Id id) {
4220 auto const tsList = vmStack().topC();
4222 auto const reified = [&] () -> ArrayData* {
4223 if (tsList->m_type == KindOfNull) return nullptr;
4224 if (!tvIsVec(tsList)) {
4225 raise_error("Attempting NewObjRD with invalid reified generics");
4227 return tsList->m_data.parr;
4228 }();
// newObjDImpl pops the generics cell itself when reified != nullptr.
4229 newObjDImpl(id, reified);
// NewObjS: construct via a special class ref (self/parent/static). `new
// static` on a class with reified generics is forbidden; otherwise reified
// generics, when the class has them, are pulled from the current frame.
4232 OPTBLD_INLINE void iopNewObjS(SpecialClsRef ref) {
4233 auto const cls = specialClsRefToCls(ref);
4234 if (ref == SpecialClsRef::LateBoundCls && cls->hasReifiedGenerics()) {
4235 raise_error(Strings::NEW_STATIC_ON_REIFIED_CLASS, cls->name()->data());
4237 auto const reified_generics = cls->hasReifiedGenerics()
4238 ? getClsReifiedGenericsProp(cls, vmfp()) : nullptr;
4239 auto this_ = newObjImpl(cls, reified_generics);
4240 vmStack().pushObjectNoRc(this_);
// FCallCtor: invoke the constructor of the object sitting under the call's
// inputs and ActRec cells. The ctor lookup raises if not found; the stack
// slot holding the object is nulled so ownership flows only through the
// Object context handed to fcallImpl.
4243 OPTBLD_INLINE JitResumeAddr iopFCallCtor(bool retToJit, PC origpc, PC& pc,
4244 FCallArgs fca, const StringData*) {
4245 assertx(fca.numRets == 1);
4246 assertx(fca.asyncEagerOffset == kInvalidOffset);
4247 assertx(tvIsObject(vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))));
4248 auto const obj =
4249 vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))->m_data.pobj;
4251 const Func* func;
4252 auto const ctx = arGetContextClass(vmfp());
4253 auto const callCtx = MethodLookupCallContext(ctx, vmfp()->func());
// RaiseOnNotFound: lookupCtorMethod fatals rather than returning failure,
// hence the assert on the (otherwise unused) result.
4254 auto const res UNUSED = lookupCtorMethod(func, obj->getVMClass(), callCtx,
4255 MethodLookupErrorOptions::RaiseOnNotFound);
4256 assertx(res == LookupResult::MethodFoundWithThis);
4258 // fcallImpl() will do further checks before spilling the ActRec. If any
4259 // of these checks fail, make sure it gets decref'd only via ctx.
4260 tvWriteNull(*vmStack().indC(fca.numInputs() + (kNumActRecCells - 1)));
4261 return fcallImpl<false>(
4262 retToJit, origpc, pc, fca, func, Object::attach(obj), true, true);
4265 OPTBLD_INLINE void iopLockObj() {
4266 auto c1 = vmStack().topC();
4267 if (!tvIsObject(*c1)) raise_error("LockObj: expected an object");
4268 c1->m_data.pobj->lockObject();
4271 namespace {
// Shared body of IterInit/LIterInit. `base == nullptr` means the base is the
// (non-local) stack top; otherwise `base` points at a local. On an empty
// base, control transfers to `targetpc`. For array-like bases a specialized
// helper is used; everything else goes through the generic Iter::init.
4273 void implIterInit(PC& pc, const IterArgs& ita, TypedValue* base,
4274 PC targetpc, IterTypeOp op) {
4275 auto const local = base != nullptr;
4277 if (!local) base = vmStack().topC();
4278 auto val = frame_local(vmfp(), ita.valId);
4279 auto key = ita.hasKey() ? frame_local(vmfp(), ita.keyId) : nullptr;
4280 auto it = frame_iter(vmfp(), ita.iterId);
4282 if (isArrayLikeType(type(base))) {
4283 auto const arr = base->m_data.parr;
4284 auto const res = key
4285 ? new_iter_array_key_helper(op)(it, arr, val, key)
4286 : new_iter_array_helper(op)(it, arr, val);
// res == 0 means the array had no elements to iterate: jump past the loop.
4287 if (res == 0) pc = targetpc;
// discard (not popC): the array helper has consumed/accounted the base ref.
4288 if (!local) vmStack().discard();
4289 return;
4292 // NOTE: It looks like we could call new_iter_object at this point. However,
4293 // doing so is incorrect, since new_iter_array / new_iter_object only handle
4294 // array-like and object bases, respectively. We may have some other kind of
4295 // base which the generic Iter::init handles correctly.
4297 // As a result, the simplest code we could have here is the generic case.
4298 // It's also about as fast as it can get, because at this point, we're almost
4299 // always going to create an object iter, which can't really be optimized.
4302 if (it->init(base)) {
4303 tvAsVariant(val) = it->val();
4304 if (key) tvAsVariant(key) = it->key();
4305 } else {
4306 pc = targetpc;
4308 if (!local) vmStack().popC();
// Shared body of IterNext/LIterNext: advance the iterator, writing the next
// value (and key, if present) into the output locals. If there are more
// elements, loop back to `targetpc` (with a surprise-flag check on the
// backward jump); otherwise fall through.
4311 void implIterNext(PC& pc, const IterArgs& ita, TypedValue* base, PC targetpc) {
4312 auto val = frame_local(vmfp(), ita.valId);
4313 auto key = ita.hasKey() ? frame_local(vmfp(), ita.keyId) : nullptr;
4314 auto it = frame_iter(vmfp(), ita.iterId);
4316 auto const more = [&]{
// Local array-like bases use the liter_* helpers that take the array
// explicitly; all other cases use the generic iter_next helpers.
4317 if (base != nullptr && isArrayLikeType(base->m_type)) {
4318 auto const arr = base->m_data.parr;
4319 return key ? liter_next_key_ind(it, val, key, arr)
4320 : liter_next_ind(it, val, arr);
4322 return key ? iter_next_key_ind(it, val, key) : iter_next_ind(it, val);
4323 }();
4325 if (more) {
4326 vmpc() = targetpc;
4327 jmpSurpriseCheck(targetpc - pc);
4328 pc = targetpc;
// IterInit: initialize an iterator over the (non-local) base on stack top.
4334 OPTBLD_INLINE void iopIterInit(PC& pc, const IterArgs& ita, PC targetpc) {
4335 auto const op = IterTypeOp::NonLocal;
4336 implIterInit(pc, ita, nullptr, targetpc, op);
// LIterInit: initialize an iterator over a local base; the BaseConst flag
// selects the const-base specialization.
4339 OPTBLD_INLINE void iopLIterInit(PC& pc, const IterArgs& ita,
4340 TypedValue* base, PC targetpc) {
4341 auto const op = ita.flags & IterArgs::Flags::BaseConst
4342 ? IterTypeOp::LocalBaseConst
4343 : IterTypeOp::LocalBaseMutable;
4344 implIterInit(pc, ita, base, targetpc, op);
// IterNext: advance a non-local iterator (no local base).
4347 OPTBLD_INLINE void iopIterNext(PC& pc, const IterArgs& ita, PC targetpc) {
4348 implIterNext(pc, ita, nullptr, targetpc);
// LIterNext: advance an iterator whose base is a local.
4351 OPTBLD_INLINE void iopLIterNext(PC& pc, const IterArgs& ita,
4352 TypedValue* base, PC targetpc) {
4353 implIterNext(pc, ita, base, targetpc);
// IterFree: release the iterator's resources.
4356 OPTBLD_INLINE void iopIterFree(Iter* it) {
4357 it->free();
// LIterFree: release a local-base iterator; the base lval is unused here.
4360 OPTBLD_INLINE void iopLIterFree(Iter* it, tv_lval) {
4361 it->free();
// Shared implementation of include/require and their *_once / doc-root
// variants. Resolves the path on the stack top according to `flags`, looks
// up the unit, and pushes true/false for success/failure. A missing file is
// fatal when InclOpFlags::Fatal is set, a warning otherwise.
4364 OPTBLD_INLINE void inclOp(InclOpFlags flags, const char* opName) {
4365 TypedValue* c1 = vmStack().topC();
4366 auto path = String::attach(prepareKey(*c1));
// Set by lookupUnit: whether this is the first time the unit is included
// (drives the *_once semantics below).
4367 bool initial;
4368 TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
4369 flags & InclOpFlags::Once ? "Once" : "",
4370 flags & InclOpFlags::DocRoot ? "DocRoot" : "",
4371 flags & InclOpFlags::Relative ? "Relative" : "",
4372 flags & InclOpFlags::Fatal ? "Fatal" : "",
4373 path.data());
// Directory of the unit currently executing, for relative lookups.
4375 auto curUnitFilePath = [&] {
4376 namespace fs = boost::filesystem;
4377 fs::path currentUnit(vmfp()->func()->unit()->filepath()->data());
4378 fs::path currentDir(currentUnit.branch_path());
4379 return currentDir.string();
4382 auto const unit = [&] {
4383 if (flags & InclOpFlags::Relative) {
4384 String absPath = curUnitFilePath() + '/';
4385 absPath += path;
4386 return lookupUnit(absPath.get(), "", &initial,
4387 Native::s_noNativeFuncs, false);
4389 if (flags & InclOpFlags::DocRoot) {
4390 return lookupUnit(
4391 SourceRootInfo::RelativeToPhpRoot(path).get(), "", &initial,
4392 Native::s_noNativeFuncs, false);
4394 return lookupUnit(path.get(), curUnitFilePath().c_str(), &initial,
4395 Native::s_noNativeFuncs, false);
4396 }();
4398 vmStack().popC();
4399 if (unit == nullptr) {
4400 if (flags & InclOpFlags::Fatal) {
4401 raise_error("%s(%s): File not found", opName, path.data());
4402 } else {
4403 raise_warning("%s(%s): File not found", opName, path.data());
4405 vmStack().pushBool(false);
4406 return;
// Only merge the unit's definitions on first inclusion for the Once ops.
4409 if (!(flags & InclOpFlags::Once) || initial) {
4410 unit->merge();
4412 vmStack().pushBool(true);
// Incl: plain include.
4415 OPTBLD_INLINE void iopIncl() {
4416 inclOp(InclOpFlags::Default, "include");
// InclOnce: include_once.
4419 OPTBLD_INLINE void iopInclOnce() {
4420 inclOp(InclOpFlags::Once, "include_once");
// Req: require (missing file is fatal).
4423 OPTBLD_INLINE void iopReq() {
4424 inclOp(InclOpFlags::Fatal, "require");
// ReqOnce: require_once (fatal + once semantics).
4427 OPTBLD_INLINE void iopReqOnce() {
4428 inclOp(InclOpFlags::Fatal | InclOpFlags::Once, "require_once");
// ReqDoc: require_once resolved relative to the document root. The op name
// reported in errors is deliberately "require_once".
4431 OPTBLD_INLINE void iopReqDoc() {
4432 inclOp(
4433 InclOpFlags::Fatal | InclOpFlags::Once | InclOpFlags::DocRoot,
4434 "require_once"
// Eval: compile and merge the code string on the stack top. Disallowed in
// RepoAuthoritative builds. Pushes true on success; on a fatal in the
// compiled unit, logs a warning-level message and pushes false.
4438 OPTBLD_INLINE void iopEval() {
4439 TypedValue* c1 = vmStack().topC();
4441 if (UNLIKELY(RuntimeOption::EvalAuthoritativeMode)) {
4442 // Ahead of time whole program optimizations need to assume it can
4443 // see all the code, or it really can't do much.
4444 raise_error("You can't use eval in RepoAuthoritative mode");
// Prefix with "<?hh " so the evaluated string is parsed as Hack code.
4447 auto code = String::attach(prepareKey(*c1));
4448 String prefixedCode = concat("<?hh ", code);
// Synthesize a filename of the form "<file>(<line>)(<md5>...)" so stack
// traces and logs can attribute the eval'd code.
4450 auto evalFilename = std::string();
4451 auto vm = &*g_context;
4452 string_printf(
4453 evalFilename,
4454 "%s(%d)(%s" EVAL_FILENAME_SUFFIX,
4455 vm->getContainingFileName()->data(),
4456 vm->getLine(),
4457 string_md5(code.slice()).c_str()
4459 auto unit = compileEvalString(prefixedCode.get(), evalFilename.c_str());
4460 if (!RuntimeOption::EvalJitEvaledCode) {
4461 unit->setInterpretOnly();
4464 vmStack().popC();
4465 if (auto const info = unit->getFatalInfo()) {
4466 auto const errnum = static_cast<int>(ErrorMode::WARNING);
4467 if (vm->errorNeedsLogging(errnum)) {
4468 // manual call to Logger instead of logError as we need to use
4469 // evalFileName and line as the exception doesn't track the eval()
4470 Logger::Error(
4471 "\nFatal error: %s in %s on line %d",
4472 info->m_fatalMsg.c_str(),
4473 evalFilename.c_str(),
4474 info->m_fatalLoc.line1
4478 vmStack().pushBool(false);
4479 return;
4481 unit->merge();
4482 vmStack().pushBool(true);
4485 OPTBLD_INLINE void iopThis() {
4486 checkThis(vmfp());
4487 ObjectData* this_ = vmfp()->getThis();
4488 vmStack().pushObject(this_);
// BareThis: push $this if the frame is a method call with an instance,
// otherwise push null — optionally raising a notice (or asserting, for the
// NeverNull variant, which should be unreachable here).
4491 OPTBLD_INLINE void iopBareThis(BareThisOp bto) {
4492 if (vmfp()->func()->cls() && vmfp()->hasThis()) {
4493 ObjectData* this_ = vmfp()->getThis();
4494 vmStack().pushObject(this_);
4495 } else {
4496 vmStack().pushNull();
4497 switch (bto) {
4498 case BareThisOp::Notice: raise_notice(Strings::WARN_NULL_THIS); break;
4499 case BareThisOp::NoNotice: break;
4500 case BareThisOp::NeverNull:
4501 assertx(!"$this cannot be null in BareThis with NeverNull option");
4502 break;
// CheckThis: raise if the current frame has no $this; no stack effect.
4507 OPTBLD_INLINE void iopCheckThis() {
4508 checkThis(vmfp());
// ChainFaults: chain the previous exception (stack top) onto the current
// one (one below). Both must be Throwable objects. The prev cell is
// discarded (not popped) because chainFaultObjects takes over its reference.
4511 OPTBLD_INLINE void iopChainFaults() {
4512 auto const current = *vmStack().indC(1);
4513 auto const prev = *vmStack().indC(0);
4514 if (!isObjectType(current.m_type) ||
4515 !current.m_data.pobj->instanceof(SystemLib::s_ThrowableClass) ||
4516 !isObjectType(prev.m_type) ||
4517 !prev.m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
4518 raise_error(
4519 "Inputs to ChainFault must be objects that implement Throwable"
4523 // chainFaultObjects takes ownership of a reference to prev.
4524 vmStack().discard();
4525 chainFaultObjects(current.m_data.pobj, prev.m_data.pobj);
4528 OPTBLD_INLINE void iopLateBoundCls() {
4529 auto const cls = frameStaticClass(vmfp());
4530 if (!cls) raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
4531 vmStack().pushClass(cls);
// VerifyParamType: enforce the declared type constraint of parameter
// `param`, plus any upper bounds when the function has multiple UBs for it.
// `this` constraints are resolved against the frame's late-bound class.
4534 OPTBLD_INLINE void iopVerifyParamType(local_var param) {
4535 const Func *func = vmfp()->func();
4536 assertx(param.index < func->numParams());
4537 assertx(func->numParams() == int(func->params().size()));
4538 const TypeConstraint& tc = func->params()[param.index].typeConstraint;
4539 if (tc.isCheckable()) {
4540 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4541 tc.verifyParam(param.lval, ctx, func, param.index);
4543 if (func->hasParamsWithMultiUBs()) {
4544 auto& ubs = const_cast<Func::ParamUBMap&>(func->paramUBs());
4545 auto it = ubs.find(param.index);
4546 if (it != ubs.end()) {
4547 for (auto& ub : it->second) {
// Propagate flags from the declared constraint to the upper bound before
// checking it.
4548 applyFlagsToUB(ub, tc);
4549 if (ub.isCheckable()) {
4550 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4551 ub.verifyParam(param.lval, ctx, func, param.index);
// VerifyParamTypeTS: VerifyParamType plus a reified type-structure check.
// The type structure (a dict) is on the stack top; it is checked against
// the parameter value when the hint could be reified or the value is an
// object, and popped at the end.
4558 OPTBLD_INLINE void iopVerifyParamTypeTS(local_var param) {
4559 iopVerifyParamType(param);
4560 auto const cell = vmStack().topC();
4561 assertx(tvIsDict(cell));
4562 auto isTypeVar = tcCouldBeReified(vmfp()->func(), param.index);
4563 bool warn = false;
4564 if ((isTypeVar || tvIsObject(param.lval)) &&
4565 !verifyReifiedLocalType(
4566 param.lval, cell->m_data.parr, frameStaticClass(vmfp()), vmfp()->func(),
4567 isTypeVar, warn)) {
// `warn` (set by verifyReifiedLocalType) downgrades the error to a warning.
4568 raise_reified_typehint_error(
4569 folly::sformat(
4570 "Argument {} passed to {}() must be an instance of {}, {} given",
4571 param.index + 1,
4572 vmfp()->func()->fullName()->data(),
4573 TypeStructure::toString(ArrNR(cell->m_data.parr),
4574 TypeStructure::TSDisplayType::TSDisplayTypeUser).c_str(),
4575 describe_actual_type(param.lval)
4576 ), warn
4579 vmStack().popC();
// VerifyOutType: enforce the type constraint (and any upper bounds) of an
// inout parameter's outgoing value, which sits on the stack top.
4582 OPTBLD_INLINE void iopVerifyOutType(uint32_t paramId) {
4583 auto const func = vmfp()->func();
4584 assertx(paramId < func->numParams());
4585 assertx(func->numParams() == int(func->params().size()));
4586 auto const& tc = func->params()[paramId].typeConstraint;
4587 if (tc.isCheckable()) {
4588 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4589 tc.verifyOutParam(vmStack().topTV(), ctx, func, paramId);
4591 if (func->hasParamsWithMultiUBs()) {
4592 auto& ubs = const_cast<Func::ParamUBMap&>(func->paramUBs());
4593 auto it = ubs.find(paramId);
4594 if (it != ubs.end()) {
4595 for (auto& ub : it->second) {
4596 applyFlagsToUB(ub, tc);
4597 if (ub.isCheckable()) {
4598 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4599 ub.verifyOutParam(vmStack().topTV(), ctx, func, paramId);
4606 namespace {
// Shared body of the VerifyRetType* ops: check the return value at stack
// index `ind` against the function's return constraint and any upper bounds.
4608 OPTBLD_INLINE void verifyRetTypeImpl(size_t ind) {
4609 const auto func = vmfp()->func();
4610 const auto tc = func->returnTypeConstraint();
4611 if (tc.isCheckable()) {
4612 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4613 tc.verifyReturn(vmStack().indC(ind), ctx, func);
4615 if (func->hasReturnWithMultiUBs()) {
4616 auto& ubs = const_cast<Func::UpperBoundVec&>(func->returnUBs());
4617 for (auto& ub : ubs) {
4618 applyFlagsToUB(ub, tc);
4619 if (ub.isCheckable()) {
4620 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4621 ub.verifyReturn(vmStack().indC(ind), ctx, func);
4627 } // namespace
// VerifyRetTypeC: check the return value on the stack top.
4629 OPTBLD_INLINE void iopVerifyRetTypeC() {
4630 verifyRetTypeImpl(0); // TypedValue is on the top of the stack
// VerifyRetTypeTS: VerifyRetType plus a reified type-structure check. The
// type structure (dict) is on the stack top and the return value one below;
// the dict is popped at the end.
4633 OPTBLD_INLINE void iopVerifyRetTypeTS() {
4634 verifyRetTypeImpl(1); // TypedValue is the second element on the stack
4635 auto const ts = vmStack().topC();
4636 assertx(tvIsDict(ts));
4637 auto const cell = vmStack().indC(1);
4638 bool isTypeVar = tcCouldBeReified(vmfp()->func(), TypeConstraint::ReturnId);
4639 bool warn = false;
4640 if ((isTypeVar || tvIsObject(cell)) &&
4641 !verifyReifiedLocalType(
4642 cell, ts->m_data.parr, frameStaticClass(vmfp()), vmfp()->func(),
4643 isTypeVar, warn)) {
4644 raise_reified_typehint_error(
4645 folly::sformat(
4646 "Value returned from function {}() must be of type {}, {} given",
4647 vmfp()->func()->fullName()->data(),
4648 TypeStructure::toString(ArrNR(ts->m_data.parr),
4649 TypeStructure::TSDisplayType::TSDisplayTypeUser).c_str(),
4650 describe_actual_type(cell)
4651 ), warn
4654 vmStack().popC();
// VerifyRetNonNullC: enforce that the return value on the stack top is
// non-null per the return constraint's non-null check.
4657 OPTBLD_INLINE void iopVerifyRetNonNullC() {
4658 const auto func = vmfp()->func();
4659 const auto tc = func->returnTypeConstraint();
4660 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4661 tc.verifyReturnNonNull(vmStack().topC(), ctx, func);
// NativeImpl: invoke the builtin implementation of the current function,
// then tear down the frame and return to the caller. The native function
// leaves its return value in place and frees locals on the normal path.
4664 OPTBLD_INLINE JitResumeAddr iopNativeImpl(PC& pc) {
4665 auto const fp = vmfp();
4666 auto const func = vmfp()->func();
4667 auto const sfp = fp->sfp();
4668 auto const jitReturn = jitReturnPre(fp);
4669 auto const native = func->arFuncPtr();
4670 assertx(native != nullptr);
4671 // Actually call the native implementation. This will handle freeing the
4672 // locals in the normal case. In the case of an exception, the VM unwinder
4673 // will take care of it.
4674 native(fp);
4676 // Adjust the stack; the native implementation put the return value in the
4677 // right place for us already
4678 vmStack().ndiscard(func->numSlotsInFrame());
4679 vmStack().ret();
4681 // Return control to the caller.
4682 returnToCaller(pc, sfp, jitReturn.callOff);
4683 return jitReturnPost(jitReturn);
4686 OPTBLD_INLINE void iopSelfCls() {
4687 auto const clss = arGetContextClass(vmfp());
4688 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_SELF);
4689 vmStack().pushClass(clss);
4692 OPTBLD_INLINE void iopParentCls() {
4693 auto const clss = arGetContextClass(vmfp());
4694 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
4695 auto const parent = clss->parent();
4696 if (!parent) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
4697 vmStack().pushClass(parent);
// CreateCl: instantiate a closure. Defines the closure's class from the
// PreClass at `clsIx`, rescopes it to the enclosing class, then consumes
// `numArgs` use-variables from the stack into the new closure object.
4700 OPTBLD_INLINE void iopCreateCl(uint32_t numArgs, uint32_t clsIx) {
4701 auto const func = vmfp()->func();
4702 auto const preCls = func->unit()->lookupPreClassId(clsIx);
4703 auto const c = Class::defClosure(preCls, true);
4705 auto const cls = c->rescope(const_cast<Class*>(func->cls()));
4706 assertx(!cls->needInitialization());
4707 auto obj = RuntimeOption::RepoAuthoritative
4708 ? createClosureRepoAuth(cls) : createClosure(cls);
// init() copies the captured cells from the stack; we then drop them.
4709 c_Closure::fromObject(obj)->init(numArgs, vmfp(), vmStack().top());
4710 vmStack().ndiscard(numArgs);
4711 vmStack().pushObjectNoRc(obj);
// Fetch the frame's $this as a BaseGenerator. The frame's $this must be a
// Generator or AsyncGenerator instance (asserted).
4714 static inline BaseGenerator* this_base_generator(const ActRec* fp) {
4715 auto const obj = fp->getThis();
4716 assertx(obj->getVMClass() == AsyncGenerator::getClass() ||
4717 obj->getVMClass() == Generator::getClass());
4718 return obj->getVMClass() == Generator::getClass()
4719 ? static_cast<BaseGenerator*>(Generator::fromObject(obj))
4720 : static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj));
// Fetch the frame's $this as a (non-async) Generator.
4723 static inline Generator* this_generator(const ActRec* fp) {
4724 auto const obj = fp->getThis();
4725 return Generator::fromObject(obj);
// Interned string literal "this" (used by code elsewhere in this file).
4728 const StaticString s_this("this");
// CreateCont: first suspension of an eagerly-executing generator. Packs the
// frame (locals + iterators) into a new Generator/AsyncGenerator object,
// runs the suspend hook, frees the ActRec, stores the object as the return
// value, and returns control to the caller.
4730 OPTBLD_INLINE JitResumeAddr iopCreateCont(PC origpc, PC& pc) {
4731 auto const jitReturn = jitReturnPre(vmfp());
4733 auto const fp = vmfp();
4734 auto const func = fp->func();
4735 auto const numSlots = func->numSlotsInFrame();
4736 auto const suspendOffset = func->offsetOf(origpc);
4737 assertx(!isResumed(fp));
4738 assertx(func->isGenerator());
4740 // Create the {Async,}Generator object. Create takes care of copying local
4741 // variables and iterators.
4742 auto const obj = func->isAsync()
4743 ? AsyncGenerator::Create(fp, numSlots, nullptr, suspendOffset)
4744 : Generator::Create(fp, numSlots, nullptr, suspendOffset);
4746 auto const genData = func->isAsync() ?
4747 static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj)) :
4748 static_cast<BaseGenerator*>(Generator::fromObject(obj));
4750 EventHook::FunctionSuspendCreateCont(
4752 genData->actRec(),
4753 EventHook::Source::Interpreter
4756 // Grab caller info from ActRec.
4757 ActRec* sfp = fp->sfp();
4758 Offset callOff = fp->callOffset();
4760 // Free ActRec and store the return value.
4761 vmStack().ndiscard(numSlots);
4762 vmStack().ret();
4763 tvCopy(make_tv<KindOfObject>(obj), *vmStack().topTV());
4764 assertx(vmStack().topTV() == fp->retSlot());
4766 // Return control to the caller.
4767 returnToCaller(pc, sfp, callOff);
4769 return jitReturnPost(jitReturn);
// Shared body of ContEnter/ContRaise: resume the generator whose object is
// the frame's $this. Marks it Running, links its ActRec back to the current
// frame with the appropriate return-helper stub, and jumps to the offset the
// generator last yielded at.
4772 OPTBLD_INLINE JitResumeAddr contEnterImpl(PC origpc) {
4773 // The stack must have one cell! Or else resumableStackBase() won't work!
4774 assertx(vmStack().top() + 1 ==
4775 (TypedValue*)vmfp() - vmfp()->func()->numSlotsInFrame());
4777 auto const gen = this_base_generator(vmfp());
4778 auto const genAR = gen->actRec();
4780 // Stack overflow check.
4781 checkStack(vmStack(), genAR->func(), 0);
4783 // Point of no return, set the generator state to Running.
4784 assertx(!gen->isRunning() && !gen->isDone());
4785 gen->setState(BaseGenerator::State::Running);
4787 // Set up previous FP and return address.
4788 auto const retHelper = genAR->func()->isAsync()
4789 ? jit::tc::ustubs().asyncGenRetHelper
4790 : jit::tc::ustubs().genRetHelper;
4791 genAR->setReturn(vmfp(), origpc, retHelper, false);
4793 // Enter the generator frame.
4794 vmfp() = genAR;
4795 vmpc() = genAR->func()->at(gen->resumable()->resumeFromYieldOffset());
4797 EventHook::FunctionResumeYield(vmfp(), EventHook::Source::Interpreter);
4799 return JitResumeAddr::trans(gen->resumable()->resumeAddr());
// ContEnter: resume the generator and continue interpreting at its pc.
4802 OPTBLD_INLINE JitResumeAddr iopContEnter(PC origpc, PC& pc) {
4803 auto const retAddr = contEnterImpl(origpc);
4804 pc = vmpc();
4805 return retAddr;
// ContRaise: resume the generator, then immediately throw the exception on
// the stack inside it (implements Generator::raise()).
4808 OPTBLD_INLINE void iopContRaise(PC origpc, PC& pc) {
4809 contEnterImpl(origpc);
4810 pc = vmpc();
4811 iopThrow(pc);
// Shared body of Yield/YieldK: suspend the resumed generator at `origpc`,
// recording the yielded key/value. Non-async generators push null (the
// result of next()/send()/raise()); async generators either return a
// StaticWaitHandle eagerly or hand control back to the scheduler.
4814 OPTBLD_INLINE JitResumeAddr yield(PC origpc, PC& pc, const TypedValue* key,
4815 const TypedValue value) {
4816 auto const jitReturn = jitReturnPre(vmfp());
4818 auto const fp = vmfp();
4819 auto const func = fp->func();
4820 auto const suspendOffset = func->offsetOf(origpc);
4821 assertx(isResumed(fp));
4822 assertx(func->isGenerator());
4824 EventHook::FunctionSuspendYield(fp, EventHook::Source::Interpreter);
4826 auto const sfp = fp->sfp();
4827 auto const callOff = fp->callOffset();
4829 if (!func->isAsync()) {
4830 // Non-async generator.
4831 assertx(fp->sfp());
4832 frame_generator(fp)->yield(suspendOffset, key, value);
4834 // Push return value of next()/send()/raise().
4835 vmStack().pushNull();
4836 } else {
4837 // Async generator.
4838 auto const gen = frame_async_generator(fp);
4839 auto const eagerResult = gen->yield(suspendOffset, key, value);
4840 if (eagerResult) {
4841 // Eager execution => return StaticWaitHandle.
4842 assertx(sfp);
4843 vmStack().pushObjectNoRc(eagerResult);
4844 } else {
4845 // Resumed execution => return control to the scheduler.
4846 assertx(!sfp);
4850 returnToCaller(pc, sfp, callOff);
4852 return jitReturnPost(jitReturn);
4855 OPTBLD_INLINE JitResumeAddr iopYield(PC origpc, PC& pc) {
4856 auto const value = *vmStack().topC();
4857 vmStack().discard();
4858 return yield(origpc, pc, nullptr, value);
// YieldK: yield a key/value pair; value on top, key below it.
4861 OPTBLD_INLINE JitResumeAddr iopYieldK(PC origpc, PC& pc) {
4862 auto const key = *vmStack().indC(1);
4863 auto const value = *vmStack().topC();
4864 vmStack().ndiscard(2);
4865 return yield(origpc, pc, &key, value);
// ContCheck: validate that the generator may be advanced; CheckStarted also
// requires that it has already been started.
4868 OPTBLD_INLINE void iopContCheck(ContCheckOp subop) {
4869 this_base_generator(vmfp())->checkNext(subop == ContCheckOp::CheckStarted);
// ContValid: push whether the generator has not finished (state != Done).
4872 OPTBLD_INLINE void iopContValid() {
4873 vmStack().pushBool(
4874 this_generator(vmfp())->getState() != BaseGenerator::State::Done);
4877 OPTBLD_INLINE void iopContKey() {
4878 Generator* cont = this_generator(vmfp());
4879 cont->startedCheck();
4880 tvDup(cont->m_key, *vmStack().allocC());
4883 OPTBLD_INLINE void iopContCurrent() {
4884 Generator* cont = this_generator(vmfp());
4885 cont->startedCheck();
4887 if(cont->getState() == BaseGenerator::State::Done) {
4888 vmStack().pushNull();
4889 } else {
4890 tvDup(cont->m_value, *vmStack().allocC());
// ContGetReturn: push the generator's return value; throws unless the
// generator has finished executing successfully.
4894 OPTBLD_INLINE void iopContGetReturn() {
4895 Generator* cont = this_generator(vmfp());
4896 cont->startedCheck();
4898 if(!cont->successfullyFinishedExecuting()) {
4899 SystemLib::throwExceptionObject("Cannot get return value of a generator "
4900 "that hasn't returned");
// After a successful finish, m_value holds the generator's return value.
4903 tvDup(cont->m_value, *vmStack().allocC());
// Suspend an eagerly-executing async function or async generator that is
// blocked on the wait handle on the stack top. Builds the appropriate wait
// handle (AFWH/AGWH), runs the suspend hook, and returns it to the caller.
4906 OPTBLD_INLINE void asyncSuspendE(PC origpc, PC& pc) {
4907 auto const fp = vmfp();
4908 auto const func = fp->func();
4909 auto const suspendOffset = func->offsetOf(origpc);
4910 assertx(func->isAsync());
4911 assertx(resumeModeFromActRec(fp) != ResumeMode::Async);
4913 // Pop the dependency we are blocked on.
4914 auto child = wait_handle<c_WaitableWaitHandle>(*vmStack().topC());
4915 assertx(!child->isFinished());
4916 vmStack().discard();
4918 if (!func->isGenerator()) { // Async function.
4919 // Create the AsyncFunctionWaitHandle object. Create takes care of
4920 // copying local variables and itertors.
4921 auto waitHandle = c_AsyncFunctionWaitHandle::Create(
4922 fp, func->numSlotsInFrame(), nullptr, suspendOffset, child);
4924 if (RO::EvalEnableImplicitContext) {
4925 waitHandle->m_implicitContext = *ImplicitContext::activeCtx;
4927 // Call the suspend hook. It will decref the newly allocated waitHandle
4928 // if it throws.
4929 EventHook::FunctionSuspendAwaitEF(
4931 waitHandle->actRec(),
4932 EventHook::Source::Interpreter
4935 // Grab caller info from ActRec.
4936 ActRec* sfp = fp->sfp();
4937 Offset callOff = fp->callOffset();
4939 // Free ActRec and store the return value. In case async eager return was
4940 // requested by the caller, let it know that we did not finish eagerly.
4941 vmStack().ndiscard(func->numSlotsInFrame());
4942 vmStack().ret();
4943 tvCopy(make_tv<KindOfObject>(waitHandle), *vmStack().topTV());
4944 vmStack().topTV()->m_aux.u_asyncEagerReturnFlag = 0;
4945 assertx(vmStack().topTV() == fp->retSlot());
4947 // Return control to the caller.
4948 returnToCaller(pc, sfp, callOff);
4949 } else { // Async generator.
4950 // Create new AsyncGeneratorWaitHandle.
4951 auto waitHandle = c_AsyncGeneratorWaitHandle::Create(
4952 fp, nullptr, suspendOffset, child);
4954 if (RO::EvalEnableImplicitContext) {
4955 waitHandle->m_implicitContext = *ImplicitContext::activeCtx;
4958 // Call the suspend hook. It will decref the newly allocated waitHandle
4959 // if it throws.
4960 EventHook::FunctionSuspendAwaitEG(fp, EventHook::Source::Interpreter);
4962 // Store the return value.
4963 vmStack().pushObjectNoRc(waitHandle);
4965 // Return control to the caller (AG::next()).
4966 assertx(fp->sfp());
4967 returnToCaller(pc, fp->sfp(), fp->callOffset());
// Suspend an async function/generator that is executing in resumed (Async)
// mode. The suspend hook runs before any state is mutated (it can throw);
// then the existing wait handle awaits the child and control returns to the
// scheduler by clearing pc and vmfp.
4971 OPTBLD_INLINE void asyncSuspendR(PC origpc, PC& pc) {
4972 auto const fp = vmfp();
4973 auto const func = fp->func();
4974 auto const suspendOffset = func->offsetOf(origpc);
4975 assertx(!fp->sfp());
4976 assertx(func->isAsync());
4977 assertx(resumeModeFromActRec(fp) == ResumeMode::Async);
4979 // Pop the dependency we are blocked on.
4980 auto child = req::ptr<c_WaitableWaitHandle>::attach(
4981 wait_handle<c_WaitableWaitHandle>(*vmStack().topC()));
4982 assertx(!child->isFinished());
4983 vmStack().discard();
4985 // Before adjusting the stack or doing anything, check the suspend hook.
4986 // This can throw.
4987 EventHook::FunctionSuspendAwaitR(
4989 child.get(),
4990 EventHook::Source::Interpreter
4993 // Await child and suspend the async function/generator. May throw.
4994 if (!func->isGenerator()) { // Async function.
4995 if (RO::EvalEnableImplicitContext) {
4996 frame_afwh(fp)->m_implicitContext = *ImplicitContext::activeCtx
4998 frame_afwh(fp)->await(suspendOffset, std::move(child));
4999 } else { // Async generator.
5000 auto const gen = frame_async_generator(fp);
5001 gen->resumable()->setResumeAddr(nullptr, suspendOffset);
5002 if (RO::EvalEnableImplicitContext) {
5003 gen->getWaitHandle()->m_implicitContext = *ImplicitContext::activeCtx;
5005 gen->getWaitHandle()->await(std::move(child));
5008 // Return control to the scheduler.
5009 pc = nullptr;
5010 vmfp() = nullptr;
5013 namespace {
// Suspend the current async frame, dispatching on whether it is running in
// resumed (Async) mode or eagerly.
5015 JitResumeAddr suspendStack(PC origpc, PC &pc) {
5016 auto const jitReturn = jitReturnPre(vmfp());
5017 if (resumeModeFromActRec(vmfp()) == ResumeMode::Async) {
5018 // suspend resumed execution
5019 asyncSuspendR(origpc, pc);
5020 } else {
5021 // suspend eager execution
5022 asyncSuspendE(origpc, pc);
5024 return jitReturnPost(jitReturn);
// Await: await the awaitable on the stack top. A failed handle rethrows its
// exception; a succeeded handle's result replaces the stack cell in place;
// otherwise the frame suspends on it.
5029 OPTBLD_INLINE JitResumeAddr iopAwait(PC origpc, PC& pc) {
5030 auto const awaitable = vmStack().topC();
5031 auto wh = c_Awaitable::fromTV(*awaitable);
5032 if (UNLIKELY(wh == nullptr)) {
5033 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
// LIKELY on the failed case — presumably because the JIT punts failed
// awaitables to the interpreter (cf. the comment in iopWHResult below);
// NOTE(review): confirm this hint is intentional.
5035 if (LIKELY(wh->isFailed())) {
5036 throw req::root<Object>{wh->getException()};
5038 if (wh->isSucceeded()) {
5039 tvSet(wh->getResult(), *vmStack().topC());
5040 return JitResumeAddr::none();
5042 return suspendStack(origpc, pc);
// AwaitAll: await every non-null awaitable in the given local range. Null
// locals are skipped; non-awaitables throw. If everything is already
// finished, pushes null and continues; otherwise builds an
// AwaitAllWaitHandle over the range and suspends on it.
5045 OPTBLD_INLINE JitResumeAddr iopAwaitAll(PC origpc, PC& pc, LocalRange locals) {
5046 uint32_t cnt = 0;
5047 for (auto i = locals.first; i < locals.first + locals.count; ++i) {
5048 auto const local = *frame_local(vmfp(), i);
5049 if (tvIsNull(local)) continue;
5050 auto const awaitable = c_Awaitable::fromTV(local);
5051 if (UNLIKELY(awaitable == nullptr)) {
5052 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
5054 if (!awaitable->isFinished()) {
5055 ++cnt;
5059 if (!cnt) {
5060 vmStack().pushNull();
5061 return JitResumeAddr::none();
5064 auto obj = Object::attach(
5065 c_AwaitAllWaitHandle::fromFrameNoCheck(vmfp(), locals.first,
5066 locals.first + locals.count, cnt)
5068 assertx(obj->isWaitHandle());
5069 if (UNLIKELY(static_cast<c_Awaitable*>(obj.get())->isFinished())) {
5070 // A profiler hook may have finished the AAWH.
5071 vmStack().pushNull();
5072 return JitResumeAddr::none();
5075 vmStack().pushObjectNoRc(obj.detach());
5076 return suspendStack(origpc, pc);
// WHResult: extract the result of the wait handle on the stack top in
// place. Failed handles rethrow; pending handles throw InvalidOperation.
5079 OPTBLD_INLINE void iopWHResult() {
5080 // we should never emit this bytecode for non-waithandle
5081 auto const wh = c_Awaitable::fromTV(*vmStack().topC());
5082 if (UNLIKELY(!wh)) {
5083 raise_error("WHResult input was not a subclass of Awaitable");
5086 // the failure condition is likely since we punt to this opcode
5087 // in the JIT when the state is failed.
5088 if (wh->isFailed()) {
5089 throw_object(Object{wh->getException()});
5091 if (wh->isSucceeded()) {
5092 tvSet(wh->getResult(), *vmStack().topC());
5093 return;
5095 SystemLib::throwInvalidOperationExceptionObject(
5096 "Request for result on pending wait handle, "
5097 "must await or join() before calling result()");
5098 not_reached();
// SetImplicitContextByValue: install the object (or null) on the stack top
// as the implicit context and push the previous context (null or object).
// When the feature flag is off, consumes the input and pushes null.
5101 OPTBLD_INLINE void iopSetImplicitContextByValue() {
5102 if (!RO::EvalEnableImplicitContext) {
5103 vmStack().popC();
5104 vmStack().pushNull();
5105 return;
5107 auto const tv = vmStack().topC();
5108 auto const obj = [&]() -> ObjectData* {
5109 if (tvIsNull(tv)) return nullptr;
5110 if (UNLIKELY(!tvIsObject(tv))) {
5111 SystemLib::throwInvalidArgumentExceptionObject(
5112 "Invalid input to SetImplicitContextByValue");
5114 return tv->m_data.pobj;
5115 }();
5116 vmStack().discard(); // ref-count will be transferred
// setByValue returns the previously-active context, pushed as the result.
5117 auto result = ImplicitContext::setByValue(Object::attach(obj));
5118 if (result.isNull()) {
5119 vmStack().pushNull();
5120 } else {
5121 vmStack().pushObjectNoRc(result.detach());
// CheckProp: push whether the declared property `propName` (looked up in
// the calling context) has already been initialized (i.e. is not Uninit).
5125 OPTBLD_INLINE void iopCheckProp(const StringData* propName) {
5126 auto* cls = vmfp()->getClass();
5127 auto* propVec = cls->getPropData();
5128 always_assert(propVec);
5130 auto* ctx = arGetContextClass(vmfp());
// Slots are declaration order; propSlotToIndex maps to storage order.
5131 auto slot = ctx->lookupDeclProp(propName);
5132 auto index = cls->propSlotToIndex(slot);
5134 auto const val = (*propVec)[index].val;
5135 vmStack().pushBool(type(val) != KindOfUninit);
// Initialize a static or non-static property of the current class with the
// value on top of the stack (86pinit/86sinit). Verifies the property's type
// hint when EvalCheckPropTypeHints is enabled, then copies the value in and
// pops it.
OPTBLD_INLINE void iopInitProp(const StringData* propName, InitPropOp propOp) {
  auto* cls = vmfp()->getClass();

  auto* ctx = arGetContextClass(vmfp());
  auto* fr = vmStack().topC();

  // Resolve the destination lval for the property being initialized.
  auto lval = [&] () -> tv_lval {
    switch (propOp) {
      case InitPropOp::Static: {
        auto const slot = ctx->lookupSProp(propName);
        assertx(slot != kInvalidSlot);
        auto ret = cls->getSPropData(slot);
        if (RuntimeOption::EvalCheckPropTypeHints > 0) {
          auto const& sprop = cls->staticProperties()[slot];
          auto const& tc = sprop.typeConstraint;
          if (tc.isCheckable()) {
            tc.verifyStaticProperty(fr, cls, sprop.cls, sprop.name);
          }
        }
        return ret;
      }

      case InitPropOp::NonStatic: {
        auto* propVec = cls->getPropData();
        always_assert(propVec);
        auto const slot = ctx->lookupDeclProp(propName);
        auto const index = cls->propSlotToIndex(slot);
        assertx(slot != kInvalidSlot);
        auto ret = (*propVec)[index].val;
        if (RuntimeOption::EvalCheckPropTypeHints > 0) {
          auto const& prop = cls->declProperties()[slot];
          auto const& tc = prop.typeConstraint;
          if (tc.isCheckable()) tc.verifyProperty(fr, cls, prop.cls, prop.name);
        }
        return ret;
      }
    }
    always_assert(false);
  }();

  // Copy (dup, not move) the initializer value into the property, then pop it.
  tvDup(*fr, lval);
  vmStack().popC();
}
5182 OPTBLD_INLINE void iopOODeclExists(OODeclExistsOp subop) {
5183 TypedValue* aloadTV = vmStack().topTV();
5184 if (aloadTV->m_type != KindOfBoolean) {
5185 raise_error("OODeclExists: Expected Bool on top of stack, got %s",
5186 tname(aloadTV->m_type).c_str());
5189 bool autoload = aloadTV->m_data.num;
5190 vmStack().popX();
5192 TypedValue* name = vmStack().topTV();
5193 if (!isStringType(name->m_type)) {
5194 raise_error("OODeclExists: Expected String on stack, got %s",
5195 tname(aloadTV->m_type).c_str());
5198 ClassKind kind;
5199 switch (subop) {
5200 case OODeclExistsOp::Class : kind = ClassKind::Class; break;
5201 case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
5202 case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
5204 tvAsVariant(name) = Class::exists(name->m_data.pstr, autoload, kind);
// Implements the error-suppression (@) operator. Start saves the current
// error-reporting level into local `loc` and zeroes it; End restores the
// level previously saved there.
OPTBLD_INLINE void iopSilence(tv_lval loc, SilenceOp subop) {
  switch (subop) {
    case SilenceOp::Start:
      // Stash the old level as an int in the local; zero_error_level()
      // returns the level it replaced.
      type(loc) = KindOfInt64;
      val(loc).num = zero_error_level();
      break;
    case SilenceOp::End:
      assertx(type(loc) == KindOfInt64);
      restore_error_level(val(loc).num);
      break;
  }
}
5220 std::string prettyStack(const std::string& prefix) {
5221 if (!vmfp()) return "__Halted";
5222 int offset = (vmfp()->func()->unit() != nullptr)
5223 ? pcOff() : 0;
5224 auto begPrefix = prefix + "__";
5225 auto midPrefix = prefix + "|| ";
5226 auto endPrefix = prefix + "\\/";
5227 auto stack = vmStack().toString(vmfp(), offset, midPrefix);
5228 return begPrefix + "\n" + stack + endPrefix;
5231 // callable from gdb
5232 void DumpStack() {
5233 fprintf(stderr, "%s\n", prettyStack("").c_str());
5236 // callable from gdb
5237 void DumpCurUnit(int skip) {
5238 ActRec* fp = vmfp();
5239 Offset pc = fp->func()->unit() ? pcOff() : 0;
5240 while (skip--) {
5241 fp = g_context->getPrevVMState(fp, &pc);
5243 if (fp == nullptr) {
5244 std::cout << "Don't have a valid fp\n";
5245 return;
5248 printf("Offset = %d, in function %s\n", pc, fp->func()->name()->data());
5249 Unit* u = fp->func()->unit();
5250 if (u == nullptr) {
5251 std::cout << "Current unit is NULL\n";
5252 return;
5254 printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
5255 std::cout << u->toString();
// callable from gdb
// Print the translation-cache address we were called from, plus the current
// file:line of the live VM frame, to stderr.
void PrintTCCallerInfo() {
  VMRegAnchor _;

  auto const f = vmfp()->func();
  auto const u = f->unit();
  // Walk the native frame chain upward and take the first saved return
  // address that lies inside the translation cache.
  auto const rip = []() -> jit::TCA {
    DECLARE_FRAME_POINTER(reg_fp);
    // NB: We can't directly mutate the register-mapped `reg_fp'.
    for (ActRec* fp = reg_fp; fp; fp = fp->m_sfp) {
      auto const rip = jit::TCA(fp->m_savedRip);
      if (jit::tc::isValidCodeAddress(rip)) return rip;
    }
    return nullptr;
  }();

  fprintf(stderr, "Called from TC address %p\n", rip);
  std::cerr << u->filepath()->data() << ':'
            << f->getLineNumber(f->offsetOf(vmpc())) << '\n';
}
// thread-local cached coverage info
// Last (unit, line) recorded by recordCodeCoverage(); used to avoid
// re-recording when consecutive bytecodes map to the same source line.
static __thread Unit* s_prev_unit;
static __thread int s_prev_line;
// Record code coverage for the bytecode at the current vmpc(). Supports two
// modes: per-file coverage (recorded on the Unit) and request-wide coverage
// (recorded on the request-info coverage map, deduplicated per line).
void recordCodeCoverage(PC /*pc*/) {
  auto const func = vmfp()->func();
  Unit* unit = func->unit();
  assertx(unit != nullptr);
  // Never attribute coverage to the built-in systemlib hhas unit.
  if (unit == SystemLib::s_hhas_unit) {
    return;
  }

  if (!RO::RepoAuthoritative && RO::EvalEnablePerFileCoverage) {
    if (unit->isCoverageEnabled()) {
      unit->recordCoverage(func->getLineNumber(pcOff()));
    }
    return;
  }

  int line = func->getLineNumber(pcOff());
  assertx(line != -1);
  // Only touch the coverage map when we moved to a new (unit, line) pair.
  if (unit != s_prev_unit || line != s_prev_line) {
    s_prev_unit = unit;
    s_prev_line = line;
    const StringData* filepath = unit->filepath();
    assertx(filepath->isStatic());
    RI().m_coverage.Record(filepath->data(), line, line);
  }
}
5309 void resetCoverageCounters() {
5310 s_prev_line = -1;
5311 s_prev_unit = nullptr;
// Emit a separator line (labeled with the opcode name) at trace level 3,
// bracketing the per-opcode stack dumps produced by COND_STACKTRACE.
static inline void
condStackTraceSep(Op opcode) {
  TRACE(3, "%s "
        "========================================"
        "========================================\n",
        opcodeToName(opcode));
}
// Dump the pretty-printed VM stack at trace level 3, each line prefixed
// with `pfx`. Compiles away when tracing is disabled.
#define COND_STACKTRACE(pfx)\
  ONTRACE(3, auto stack = prettyStack(pfx);\
          Trace::trace("%s\n", stack.c_str());)
5326 namespace {
/*
 * iopWrapReturn() calls a function pointer and forwards its return value if it
 * returns JitResumeAddr, or JitResumeAddr::none() if it returns void.
 * Some opcodes need the original PC by value, and some do not. We have wrappers
 * for both flavors. Some opcodes (FCall*) may want to return to the JIT in the
 * middle of an instruction, so we pass the breakOnCtlFlow flag. When this flag
 * is true in control flow instructions such as FCall*, we are guaranteed to
 * use the returned JitResumeAddr to return to the JIT and so it is safe to
 * return in the middle of an instruction.
 */
// Overload for iops that return void and do not take the original PC.
template<typename... Params, typename... Args>
OPTBLD_INLINE JitResumeAddr
iopWrapReturn(void(fn)(Params...), bool, PC, Args&&... args) {
  fn(std::forward<Args>(args)...);
  return JitResumeAddr::none();
}

// Overload for iops that produce a JitResumeAddr, without the original PC.
template<typename... Params, typename... Args>
OPTBLD_INLINE JitResumeAddr
iopWrapReturn(JitResumeAddr(fn)(Params...), bool, PC, Args&&... args) {
  return fn(std::forward<Args>(args)...);
}

// Overload for void iops that need the original PC as first argument.
template<typename... Params, typename... Args>
OPTBLD_INLINE JitResumeAddr
iopWrapReturn(void(fn)(PC, Params...), bool, PC origpc, Args&&... args) {
  fn(origpc, std::forward<Args>(args)...);
  return JitResumeAddr::none();
}

// Overload for JitResumeAddr iops that need the original PC.
template<typename... Params, typename... Args>
OPTBLD_INLINE JitResumeAddr
iopWrapReturn(JitResumeAddr(fn)(PC, Params...),
              bool, PC origpc, Args&&... args) {
  return fn(origpc, std::forward<Args>(args)...);
}

// Overload for iops (FCall*) that additionally take breakOnCtlFlow so they
// may return to the JIT mid-instruction.
template<typename... Params, typename... Args>
OPTBLD_INLINE JitResumeAddr
iopWrapReturn(JitResumeAddr(fn)(bool, PC, Params...),
              bool breakOnCtlFlow, PC origpc, Args&&... args) {
  return fn(breakOnCtlFlow, origpc, std::forward<Args>(args)...);
}
/*
 * Some bytecodes with SA immediates want the raw Id to look up a NamedEntity
 * quickly, and some want the const StringData*. Support both by decoding to
 * this struct and implicitly converting to what the callee wants.
 */
struct litstr_id {
  /* implicit */ ALWAYS_INLINE operator const StringData*() const {
    // Resolve the Id against the currently executing unit's litstr table.
    return liveUnit()->lookupLitstrId(id);
  }
  /* implicit */ ALWAYS_INLINE operator Id() const {
    return id;
  }

  Id id{kInvalidId};
};
/*
 * These macros are used to generate wrapper functions for the iop*() functions
 * defined earlier in this file. iopWrapFoo() decodes immediates from the
 * bytecode stream according to the signature of Foo (in hhbc.h), then calls
 * iopFoo() with those decoded arguments.
 */
// FLAG_* expands to the extra argument forwarded to iopWrapReturn() per
// instruction-flag class: control-flow ops (CF) get the pc, others nothing.
#define FLAG_NF
#define FLAG_TF
#define FLAG_CF , pc
#define FLAG_CF_TF FLAG_CF

// One DECODE_* macro per hhbc immediate-type tag; each consumes one
// immediate from the bytecode stream, advancing pc.
#define DECODE_IVA decode_iva(pc)
#define DECODE_I64A decode<int64_t>(pc)
#define DECODE_LA decode_local(pc)
#define DECODE_NLA decode_named_local_var(pc)
#define DECODE_ILA decode_indexed_local(pc)
#define DECODE_IA decode_iter(pc)
#define DECODE_DA decode<double>(pc)
#define DECODE_SA decode<litstr_id>(pc)
#define DECODE_AA decode_litarr(pc)
#define DECODE_RATA decode_rat(pc)
#define DECODE_BA origpc + decode_ba(pc)
#define DECODE_OA(ty) decode<ty>(pc)
#define DECODE_KA decode_member_key(pc, liveUnit())
#define DECODE_LAR decodeLocalRange(pc)
#define DECODE_ITA decodeIterArgs(pc)
#define DECODE_FCA decodeFCallArgs(op, pc, liveUnit())
#define DECODE_BLA decode_imm_array<Offset>(pc)
#define DECODE_SLA decode_imm_array<StrVecItem>(pc)
#define DECODE_VSA decode_imm_array<Id>(pc)

// DECODE_<N> binds locals imm1..immN for an opcode with N immediates.
#define DECODE_NA
#define DECODE_ONE(a) auto const imm1 = DECODE_##a;
#define DECODE_TWO(a, b) DECODE_ONE(a) auto const imm2 = DECODE_##b;
#define DECODE_THREE(a, b, c) DECODE_TWO(a, b) auto const imm3 = DECODE_##c;
#define DECODE_FOUR(a, b, c, d) \
  DECODE_THREE(a, b, c) auto const imm4 = DECODE_##d;
#define DECODE_FIVE(a, b, c, d, e) \
  DECODE_FOUR(a, b, c, d) auto const imm5 = DECODE_##e;
#define DECODE_SIX(a, b, c, d, e, f) \
  DECODE_FIVE(a, b, c, d, e) auto const imm6 = DECODE_##f;

// PASS_<N> forwards the bound imm1..immN locals as call arguments.
#define PASS_NA
#define PASS_ONE(...) , imm1
#define PASS_TWO(...) , imm1, imm2
#define PASS_THREE(...) , imm1, imm2, imm3
#define PASS_FOUR(...) , imm1, imm2, imm3, imm4
#define PASS_FIVE(...) , imm1, imm2, imm3, imm4, imm5
#define PASS_SIX(...) , imm1, imm2, imm3, imm4, imm5, imm6

// When taint tracking is compiled in, also invoke the taint::iop* hook with
// the same decoded arguments; otherwise expand to an empty statement.
#ifdef HHVM_TAINT
#define TAINT(name, imm, in, out, flags) \
  iopWrapReturn( \
    taint::iop##name, breakOnCtlFlow, origpc FLAG_##flags PASS_##imm);
#else
#define TAINT(name, imm, in, out, flags) ;
#endif
// Generate one iopWrap<name><breakOnCtlFlow>() per opcode: compute the
// original pc, decode the immediates, run the optional taint hook, then
// dispatch to iop<name> via the iopWrapReturn overload set.
#define O(name, imm, in, out, flags)                                \
  template<bool breakOnCtlFlow>                                     \
  OPTBLD_INLINE JitResumeAddr iopWrap##name(PC& pc) {               \
    UNUSED auto constexpr op = Op::name;                            \
    UNUSED auto const origpc = pc - encoded_op_size(op);            \
    DECODE_##imm                                                    \
    TAINT(name, imm, in, out, flags);                               \
    return iopWrapReturn(                                           \
      iop##name, breakOnCtlFlow, origpc FLAG_##flags PASS_##imm);   \
  }
OPCODES
// Tear down the code-generation helper macros; they are only needed while
// expanding the iopWrap* definitions above.
#undef FLAG_NF
#undef FLAG_TF
#undef FLAG_CF
#undef FLAG_CF_TF

#undef DECODE_IVA
#undef DECODE_I64A
#undef DECODE_LA
#undef DECODE_NLA
#undef DECODE_ILA
#undef DECODE_IA
#undef DECODE_DA
#undef DECODE_SA
#undef DECODE_AA
#undef DECODE_RATA
#undef DECODE_BA
#undef DECODE_OA
#undef DECODE_KA
#undef DECODE_LAR
#undef DECODE_FCA
#undef DECODE_BLA
#undef DECODE_SLA
#undef DECODE_VSA

#undef DECODE_NA
#undef DECODE_ONE
#undef DECODE_TWO
#undef DECODE_THREE
#undef DECODE_FOUR
#undef DECODE_FIVE

#undef PASS_NA
#undef PASS_ONE
#undef PASS_TWO
#undef PASS_THREE
#undef PASS_FOUR
#undef PASS_FIVE

#undef O
/*
 * The interpOne functions are fat wrappers around the iop* functions, mostly
 * adding a bunch of debug-only logging and stats tracking.
 */
#define O(opcode, imm, push, pop, flags)                                      \
  JitResumeAddr interpOne##opcode(ActRec* fp, TypedValue* sp, Offset pcOff) { \
    /* Sync the VM register state from the JIT's (fp, sp, pcOff). */          \
    interp_set_regs(fp, sp, pcOff);                                           \
    SKTRACE(5, liveSK(),                                                      \
            "%40s %p %p\n",                                                   \
            "interpOne" #opcode " before (fp,sp)", vmfp(), vmsp());           \
    if (Stats::enableInstrCount()) {                                          \
      Stats::inc(Stats::Instr_Transl##opcode, -1);                            \
      Stats::inc(Stats::Instr_InterpOne##opcode);                             \
    }                                                                         \
    if (Trace::moduleEnabled(Trace::interpOne, 1)) {                          \
      static const StringData* cat = makeStaticString("interpOne");           \
      static const StringData* name = makeStaticString(#opcode);              \
      Stats::incStatGrouped(cat, name, 1);                                    \
    }                                                                         \
    if (Trace::moduleEnabled(Trace::ringbuffer)) {                            \
      auto sk = liveSK().toAtomicInt();                                       \
      Trace::ringbufferEntry(Trace::RBTypeInterpOne, sk, 0);                  \
    }                                                                         \
    INC_TPC(interp_one)                                                       \
    /* Correct for over-counting in TC-stats. */                              \
    Stats::inc(Stats::Instr_TC, -1);                                          \
    condStackTraceSep(Op##opcode);                                            \
    COND_STACKTRACE("op"#opcode" pre: ");                                     \
    PC pc = vmpc();                                                           \
    ONTRACE(1, auto offset = vmfp()->func()->offsetOf(pc);                    \
            Trace::trace("op"#opcode" offset: %d\n", offset));                \
    assertx(peek_op(pc) == Op::opcode);                                       \
    pc += encoded_op_size(Op::opcode);                                        \
    /* Execute exactly one instruction, then publish the advanced pc. */     \
    auto const retAddr = iopWrap##opcode<true>(pc);                           \
    vmpc() = pc;                                                              \
    COND_STACKTRACE("op"#opcode" post: ");                                    \
    condStackTraceSep(Op##opcode);                                            \
    /*                                                                        \
     * Only set regstate back to dirty if an exception is not                 \
     * propagating. If an exception is throwing, regstate for this call       \
     * is actually still correct, and we don't have information in the        \
     * fixup map for interpOne calls anyway.                                  \
     */ \
    regState() = VMRegState::DIRTY;                                           \
    return retAddr;                                                           \
  }
OPCODES
#undef O
// Table of interpOne entry points, indexed by opcode; used by the JIT to
// punt a single instruction to the interpreter.
InterpOneFunc interpOneEntryPoints[] = {
#define O(opcode, imm, push, pop, flags) &interpOne##opcode,
  OPCODES
#undef O
};
5555 // fast path to look up native pc; try entry point first.
5556 PcPair lookup_cti(const Func* func, PC pc) {
5557 auto unitpc = func->entry();
5558 auto cti_entry = func->ctiEntry();
5559 if (!cti_entry) {
5560 cti_entry = compile_cti(const_cast<Func*>(func), unitpc);
5562 if (pc == unitpc) {
5563 return {cti_code().base() + cti_entry, pc};
5565 return {lookup_cti(func, cti_entry, unitpc, pc), pc};
// Enter the call-threaded (CTI) interpreter at the current vmfp()/vmpc(),
// with the execution modes implied by breakOnCtlFlow / coverage / debugger.
template <bool breakOnCtlFlow>
JitResumeAddr dispatchThreaded(bool coverage) {
  auto modes = breakOnCtlFlow ? ExecMode::BB : ExecMode::Normal;
  if (coverage) {
    modes = modes | ExecMode::Coverage;
  }
  DEBUGGER_ATTACHED_ONLY(modes = modes | ExecMode::Debugger);
  auto target = lookup_cti(vmfp()->func(), vmpc());
  // g_enterCti uses a custom ABI; barriers keep callee-saved regs intact.
  CALLEE_SAVED_BARRIER();
  auto retAddr = g_enterCti(modes, target, rds::header());
  CALLEE_SAVED_BARRIER();
  return JitResumeAddr::trans(retAddr);
}
/*
 * Main interpreter loop. When breakOnCtlFlow is true, interpretation stops at
 * the first control-flow instruction (basic-block mode, used by dispatchBB());
 * otherwise it runs until the current VM nesting level returns. Uses computed
 * gotos where available, with separate opcode tables for the plain, coverage,
 * and debugger variants of each opcode body.
 */
template <bool breakOnCtlFlow>
JitResumeAddr dispatchImpl() {
  // Coverage is either request-wide (RID) or per-file (unit flag).
  auto const checkCoverage = [&] {
    return !RO::EvalEnablePerFileCoverage
      ? RID().getCoverage()
      : vmfp() && vmfp()->unit()->isCoverageEnabled();
  };
  bool collectCoverage = checkCoverage();

  // Inline-interp mode forces the switch/goto interpreter (not CTI) and
  // consults a per-opcode hook; re-entry while already blocked is an error.
  auto const inlineInterp = [&]{
    using IIS = ExecutionContext::InlineInterpState;
    auto const state = g_context->m_inlineInterpState;
    assertx(IMPLIES(breakOnCtlFlow, state == IIS::NONE));
    if constexpr (breakOnCtlFlow) return false;

    switch (state) {
      case IIS::NONE: return false;
      case IIS::START: g_context->m_inlineInterpState = IIS::BLOCK; return true;
      case IIS::BLOCK: throw Exception("Re-entry during inline interp");
      default: always_assert(false);
    }
  }();

  if (cti_enabled() && !inlineInterp) {
    return dispatchThreaded<breakOnCtlFlow>(collectCoverage);
  }

  // Unfortunately, MSVC doesn't support computed
  // gotos, so use a switch instead.
#ifndef _MSC_VER
  static const void* const optabDirect[] = {
#define O(name, imm, push, pop, flags) \
    &&Label##name,
    OPCODES
#undef O
  };
  static const void* const optabDbg[] = {
#define O(name, imm, push, pop, flags) \
    &&LabelDbg##name,
    OPCODES
#undef O
  };
  static const void* const optabCover[] = {
#define O(name, imm, push, pop, flags) \
    &&LabelCover##name,
    OPCODES
#undef O
  };
  assertx(sizeof(optabDirect) / sizeof(const void *) == Op_count);
  assertx(sizeof(optabDbg) / sizeof(const void *) == Op_count);
  // Pick the opcode table: debugger > coverage > direct.
  const void* const* optab = optabDirect;
  if (collectCoverage) {
    optab = optabCover;
  }
  DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
#endif

  bool isCtlFlow = false;
  auto retAddr = JitResumeAddr::none();
  Op op;

#ifdef _MSC_VER
# define DISPATCH_ACTUAL() goto DispatchSwitch
#else
# define DISPATCH_ACTUAL() goto *optab[size_t(op)]
#endif

// Fetch and jump to the next opcode, or return when basic-block mode hit a
// control-flow instruction.
#define DISPATCH() do {                                                 \
    if (breakOnCtlFlow && isCtlFlow) {                                  \
      ONTRACE(1,                                                        \
              Trace::trace("dispatch: Halt dispatch(%p)\n",             \
                           vmfp()));                                    \
      return retAddr;                                                   \
    }                                                                   \
    opPC = pc;                                                          \
    op = decode_op(pc);                                                 \
    COND_STACKTRACE("dispatch: ");                                      \
    FTRACE(1, "dispatch: {}: {}\n", pcOff(),                            \
           instrToString(opPC, vmfp()->func()));                        \
    DISPATCH_ACTUAL();                                                  \
} while (0)

  ONTRACE(1, Trace::trace("dispatch: Enter dispatch(%p)\n",
          vmfp()));
  PC pc = vmpc();
  PC opPC;
  DISPATCH();

#define OPCODE_DBG_BODY(name, imm, push, pop, flags)          \
  phpDebuggerOpcodeHook(opPC)
#define OPCODE_COVER_BODY(name, imm, push, pop, flags)        \
  if (collectCoverage) {                                      \
    recordCodeCoverage(opPC);                                 \
  }
#define OPCODE_MAIN_BODY(name, imm, push, pop, flags)         \
  {                                                           \
    if (breakOnCtlFlow && Stats::enableInstrCount()) {        \
      Stats::inc(Stats::Instr_InterpBB##name);                \
    }                                                         \
    if (inlineInterp) {                                       \
      switch (callInlineInterpHook()) {                       \
        case InlineInterpHookResult::NONE: break;             \
        case InlineInterpHookResult::SKIP:                    \
          pc = vmpc(); goto name##Done;                       \
        case InlineInterpHookResult::STOP:                    \
          return JitResumeAddr::none();                       \
      }                                                       \
    }                                                         \
    retAddr = iopWrap##name<breakOnCtlFlow>(pc);              \
    vmpc() = pc;                                              \
    name##Done:                                               \
    /* Calls may change coverage state; re-select the optab afterwards. */ \
    if (isFCallFunc(Op::name) ||                              \
        Op::name == Op::NativeImpl) {                         \
      collectCoverage = checkCoverage();                      \
      optab = !collectCoverage ? optabDirect : optabCover;    \
      DEBUGGER_ATTACHED_ONLY(optab = optabDbg);               \
    }                                                         \
    if (breakOnCtlFlow) {                                     \
      isCtlFlow = instrIsControlFlow(Op::name);               \
    }                                                         \
    if (instrCanHalt(Op::name) && UNLIKELY(!pc)) {            \
      vmfp() = nullptr;                                       \
      vmpc() = nullptr;                                       \
      /* We returned from the top VM frame in this nesting level. This means
       * m_savedRip in our ActRec must have been callToExit, which should've
       * been returned by jitReturnPost(), whether or not we were called from
       * the TC. We only actually return callToExit to our caller if that
       * caller is dispatchBB(). */ \
      assertx(isCallToExit((uint64_t)retAddr.arg));           \
      return breakOnCtlFlow ? retAddr : JitResumeAddr::none(); \
    }                                                         \
    assertx(isCtlFlow || !retAddr);                           \
    DISPATCH();                                               \
  }

#ifdef _MSC_VER
DispatchSwitch:
  switch (uint8_t(op)) {
#define O(name, imm, push, pop, flags)                                       \
    case Op::name: {                                                         \
      DEBUGGER_ATTACHED_ONLY(OPCODE_DBG_BODY(name, imm, push, pop, flags));  \
      OPCODE_COVER_BODY(name, imm, push, pop, flags)                         \
      OPCODE_MAIN_BODY(name, imm, push, pop, flags)                          \
    }
#else
#define O(name, imm, push, pop, flags)                        \
  LabelDbg##name:                                             \
    OPCODE_DBG_BODY(name, imm, push, pop, flags);             \
  LabelCover##name:                                           \
    OPCODE_COVER_BODY(name, imm, push, pop, flags)            \
  Label##name:                                                \
    OPCODE_MAIN_BODY(name, imm, push, pop, flags)
#endif

  OPCODES

#ifdef _MSC_VER
  }
#endif
#undef O
#undef DISPATCH
#undef DISPATCH_ACTUAL
#undef OPCODE_DBG_BODY
#undef OPCODE_COVER_BODY
#undef OPCODE_MAIN_BODY

  not_reached();
}
// Interpret until the current VM nesting level returns. Never produces a
// JIT resume address (that is only meaningful in basic-block mode).
static void dispatch() {
  WorkloadStats guard(WorkloadStats::InInterp);
  tracing::BlockNoTrace _{"dispatch"};

  DEBUG_ONLY auto const retAddr = dispatchImpl<false>();
  assertx(!retAddr);
}
// Interpret one basic block (stop at the first control-flow instruction)
// and return the address at which the JIT should resume.
JitResumeAddr dispatchBB() {
  auto sk = [] {
    return SrcKey(vmfp()->func(), vmpc(), resumeModeFromActRec(vmfp()));
  };

  if (Trace::moduleEnabled(Trace::dispatchBB)) {
    static auto cat = makeStaticString("dispatchBB");
    auto name = makeStaticString(show(sk()));
    Stats::incStatGrouped(cat, name, 1);
  }
  if (Trace::moduleEnabled(Trace::ringbuffer)) {
    Trace::ringbufferEntry(Trace::RBTypeDispatchBB, sk().toAtomicInt(), 0);
  }
  return dispatchImpl<true>();
}
5776 ///////////////////////////////////////////////////////////////////////////////
5777 // Call-threaded entry points
5779 namespace {
// Compile-time switch for the prediction/lookup hit-rate profilers below.
constexpr auto do_prof = false;
static BoolProfiler PredictProf("predict"), LookupProf("lookup");

// Per-thread ring of predicted (native return address, bytecode pc) pairs,
// pushed at calls and popped at returns to predict the return target.
constexpr unsigned NumPredictors = 16; // real cpus have 8-24
static __thread unsigned s_predict{0};
static __thread PcPair s_predictors[NumPredictors];
static void pushPrediction(PcPair p) {
  s_predictors[s_predict++ % NumPredictors] = p;
}
static PcPair popPrediction() {
  return s_predictors[--s_predict % NumPredictors];
}
5795 // callsites quick reference:
5797 // simple opcodes, including throw
5798 // call #addr
5799 // conditional branch
5800 // lea [pc + instrLen(pc)], nextpc_saved
5801 // call #addr
5802 // cmp rdx, nextpc_saved
5803 // jne native-target
5804 // unconditional branch
5805 // call #addr
5806 // jmp native-target
5807 // indirect branch
5808 // call #addr
5809 // jmp rax
5810 // calls w/ return prediction
5811 // lea [pc + instrLen(pc)], nextpc_arg
5812 // call #addr
5813 // jmp rax
// Out-of-line slow path run (in non-repo-auth builds) when any special exec
// mode bit is set: debugger hook and/or code-coverage recording for this pc.
NEVER_INLINE void execModeHelper(PC pc, ExecMode modes) {
  if (modes & ExecMode::Debugger) phpDebuggerOpcodeHook(pc);
  if (modes & ExecMode::Coverage) recordCodeCoverage(pc);
  if (modes & ExecMode::BB) {
    //Stats::inc(Stats::Instr_InterpBB##name);
  }
}
/*
 * Shared body of every call-threaded opcode handler: execute one bytecode via
 * `iop`, then tell the native caller how to continue. The returned PcPair is
 * (native target or nullptr, next bytecode pc); returnaddr points at our
 * return address on the native stack so branch handlers can smash it.
 */
template<Op opcode, bool repo_auth, class Iop>
PcPair run(TCA* returnaddr, ExecMode modes, rds::Header* tl, PC nextpc, PC pc,
           Iop iop) {
  assert(vmpc() == pc);
  assert(peek_op(pc) == opcode);
  FTRACE(1, "dispatch: {}: {}\n", pcOff(),
         instrToString(pc, vmfp()->func()));
  if (!repo_auth) {
    if (UNLIKELY(modes != ExecMode::Normal)) {
      execModeHelper(pc, modes);
    }
  }
  DEBUG_ONLY auto origPc = pc;
  pc += encoded_op_size(opcode); // skip the opcode
  auto retAddr = iop(pc);
  vmpc() = pc;
  assert(!isThrow(opcode));
  if (isSimple(opcode)) {
    // caller ignores rax return value, invokes next bytecode
    return {nullptr, pc};
  }
  if (isBranch(opcode) || isUnconditionalJmp(opcode)) {
    // callsites have no ability to indirect-jump out of bytecode.
    // so smash the return address to &g_exitCti
    // if we need to exit because of dispatchBB() mode.
    // TODO: t6019406 use surprise checks to eliminate BB mode
    if (modes & ExecMode::BB) {
      *returnaddr = g_exitCti;
      // FIXME(T115315816): properly handle JitResumeAddr
      return {nullptr, (PC)retAddr.handler}; // exit stub will return retAddr
    }
    return {nullptr, pc};
  }
  // call & indirect branch: caller will jump to address returned in rax
  if (instrCanHalt(opcode) && !pc) {
    vmfp() = nullptr;
    vmpc() = nullptr;
    // We returned from the top VM frame in this nesting level. This means
    // m_savedRip in our ActRec must have been callToExit, which should've
    // been returned by jitReturnPost(), whether or not we were called from
    // the TC. We only actually return callToExit to our caller if that
    // caller is dispatchBB().
    assert(isCallToExit((uint64_t)retAddr.arg));
    if (!(modes & ExecMode::BB)) retAddr = JitResumeAddr::none();
    // FIXME(T115315816): properly handle JitResumeAddr
    return {g_exitCti, (PC)retAddr.handler};
  }
  if (instrIsControlFlow(opcode) && (modes & ExecMode::BB)) {
    // FIXME(T115315816): properly handle JitResumeAddr
    return {g_exitCti, (PC)retAddr.handler};
  }
  if (isReturnish(opcode)) {
    // Try the return-address prediction ring before a full lookup.
    auto target = popPrediction();
    if (do_prof) PredictProf(pc == target.pc);
    if (pc == target.pc) return target;
  }
  if (isFCall(opcode)) {
    // call-like opcodes predict return to next bytecode
    assert(nextpc == origPc + instrLen(origPc));
    pushPrediction({*returnaddr + kCtiIndirectJmpSize, nextpc});
  }
  if (do_prof) LookupProf(pc == vmfp()->func()->entry());
  // return ip to jump to, caller will do jmp(rax)
  return lookup_cti(vmfp()->func(), pc);
}
// register assignments inbetween calls to cti opcodes
// rax = target of indirect branch instr (call, switch, etc)
// rdx = pc (passed as 3rd arg register, 2nd return register)
// rbx = next-pc after branch instruction, only if isBranch(op)
// r12 = rds::Header* (vmtl)
// r13 = modes
// r14 = location of return address to cti caller on native stack

// Bind the fixed cti registers (r12/r13/r14) to local names. Clang lacks
// GCC's explicit-register local variables, so it copies them out via asm.
#ifdef __clang__
#define DECLARE_FIXED(TL,MODES,RA)\
  rds::Header* TL; asm volatile("mov %%r12, %0" : "=r"(TL) ::);\
  ExecMode MODES; asm volatile("mov %%r13d, %0" : "=r"(MODES) ::);\
  TCA* RA; asm volatile("mov %%r14, %0" : "=r"(RA) ::);
#else
#define DECLARE_FIXED(TL,MODES,RA)\
  register rds::Header* TL asm("r12");\
  register ExecMode MODES asm("r13");\
  register TCA* RA asm("r14");
#endif
namespace cti {
// generate cti::op call-threaded function for each opcode
// (repo-auth variant: skips the debugger/coverage exec-mode slow path)
#define O(opcode, imm, push, pop, flags)\
  PcPair opcode(PC nextpc, TCA*, PC pc) {\
    DECLARE_FIXED(tl, modes, returnaddr);\
    return run<Op::opcode,true>(returnaddr, modes, tl, nextpc, pc,\
         [](PC& pc) {\
           return iopWrap##opcode<false>(pc);\
         });\
  }
  OPCODES
#undef O

// generate debug/coverage-capable opcode bodies (for non-repo-auth)
#define O(opcode, imm, push, pop, flags)\
  PcPair d##opcode(PC nextpc, TCA*, PC pc) {\
    DECLARE_FIXED(tl, modes, returnaddr);\
    return run<Op::opcode,false>(returnaddr, modes, tl, nextpc, pc,\
         [](PC& pc) {\
           return iopWrap##opcode<false>(pc);\
         });\
  }
  OPCODES
#undef O
}
// generate table of opcode handler addresses, used by call-threaded emitter
const CodeAddress cti_ops[] = {
#define O(opcode, imm, push, pop, flags) (CodeAddress)&cti::opcode,
  OPCODES
#undef O
};
5942 const CodeAddress ctid_ops[] = {
5943 #define O(opcode, imm, push, pop, flags) (CodeAddress)&cti::d##opcode,
5944 OPCODES
5945 #undef O